Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
     7  func rewriteValueLOONG64(v *Value) bool {
     8  	switch v.Op {
     9  	case OpAbs:
    10  		v.Op = OpLOONG64ABSD
    11  		return true
    12  	case OpAdd16:
    13  		v.Op = OpLOONG64ADDV
    14  		return true
    15  	case OpAdd32:
    16  		v.Op = OpLOONG64ADDV
    17  		return true
    18  	case OpAdd32F:
    19  		v.Op = OpLOONG64ADDF
    20  		return true
    21  	case OpAdd64:
    22  		v.Op = OpLOONG64ADDV
    23  		return true
    24  	case OpAdd64F:
    25  		v.Op = OpLOONG64ADDD
    26  		return true
    27  	case OpAdd8:
    28  		v.Op = OpLOONG64ADDV
    29  		return true
    30  	case OpAddPtr:
    31  		v.Op = OpLOONG64ADDV
    32  		return true
    33  	case OpAddr:
    34  		return rewriteValueLOONG64_OpAddr(v)
    35  	case OpAnd16:
    36  		v.Op = OpLOONG64AND
    37  		return true
    38  	case OpAnd32:
    39  		v.Op = OpLOONG64AND
    40  		return true
    41  	case OpAnd64:
    42  		v.Op = OpLOONG64AND
    43  		return true
    44  	case OpAnd8:
    45  		v.Op = OpLOONG64AND
    46  		return true
    47  	case OpAndB:
    48  		v.Op = OpLOONG64AND
    49  		return true
    50  	case OpAtomicAdd32:
    51  		v.Op = OpLOONG64LoweredAtomicAdd32
    52  		return true
    53  	case OpAtomicAdd64:
    54  		v.Op = OpLOONG64LoweredAtomicAdd64
    55  		return true
    56  	case OpAtomicAnd32:
    57  		v.Op = OpLOONG64LoweredAtomicAnd32
    58  		return true
    59  	case OpAtomicAnd32value:
    60  		v.Op = OpLOONG64LoweredAtomicAnd32value
    61  		return true
    62  	case OpAtomicAnd64value:
    63  		v.Op = OpLOONG64LoweredAtomicAnd64value
    64  		return true
    65  	case OpAtomicAnd8:
    66  		return rewriteValueLOONG64_OpAtomicAnd8(v)
    67  	case OpAtomicCompareAndSwap32:
    68  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
    69  	case OpAtomicCompareAndSwap32Variant:
    70  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v)
    71  	case OpAtomicCompareAndSwap64:
    72  		v.Op = OpLOONG64LoweredAtomicCas64
    73  		return true
    74  	case OpAtomicCompareAndSwap64Variant:
    75  		v.Op = OpLOONG64LoweredAtomicCas64Variant
    76  		return true
    77  	case OpAtomicExchange32:
    78  		v.Op = OpLOONG64LoweredAtomicExchange32
    79  		return true
    80  	case OpAtomicExchange64:
    81  		v.Op = OpLOONG64LoweredAtomicExchange64
    82  		return true
    83  	case OpAtomicExchange8Variant:
    84  		v.Op = OpLOONG64LoweredAtomicExchange8Variant
    85  		return true
    86  	case OpAtomicLoad32:
    87  		v.Op = OpLOONG64LoweredAtomicLoad32
    88  		return true
    89  	case OpAtomicLoad64:
    90  		v.Op = OpLOONG64LoweredAtomicLoad64
    91  		return true
    92  	case OpAtomicLoad8:
    93  		v.Op = OpLOONG64LoweredAtomicLoad8
    94  		return true
    95  	case OpAtomicLoadPtr:
    96  		v.Op = OpLOONG64LoweredAtomicLoad64
    97  		return true
    98  	case OpAtomicOr32:
    99  		v.Op = OpLOONG64LoweredAtomicOr32
   100  		return true
   101  	case OpAtomicOr32value:
   102  		v.Op = OpLOONG64LoweredAtomicOr32value
   103  		return true
   104  	case OpAtomicOr64value:
   105  		v.Op = OpLOONG64LoweredAtomicOr64value
   106  		return true
   107  	case OpAtomicOr8:
   108  		return rewriteValueLOONG64_OpAtomicOr8(v)
   109  	case OpAtomicStore32:
   110  		v.Op = OpLOONG64LoweredAtomicStore32
   111  		return true
   112  	case OpAtomicStore32Variant:
   113  		v.Op = OpLOONG64LoweredAtomicStore32Variant
   114  		return true
   115  	case OpAtomicStore64:
   116  		v.Op = OpLOONG64LoweredAtomicStore64
   117  		return true
   118  	case OpAtomicStore64Variant:
   119  		v.Op = OpLOONG64LoweredAtomicStore64Variant
   120  		return true
   121  	case OpAtomicStore8:
   122  		v.Op = OpLOONG64LoweredAtomicStore8
   123  		return true
   124  	case OpAtomicStore8Variant:
   125  		v.Op = OpLOONG64LoweredAtomicStore8Variant
   126  		return true
   127  	case OpAtomicStorePtrNoWB:
   128  		v.Op = OpLOONG64LoweredAtomicStore64
   129  		return true
   130  	case OpAvg64u:
   131  		return rewriteValueLOONG64_OpAvg64u(v)
   132  	case OpBitLen16:
   133  		return rewriteValueLOONG64_OpBitLen16(v)
   134  	case OpBitLen32:
   135  		return rewriteValueLOONG64_OpBitLen32(v)
   136  	case OpBitLen64:
   137  		return rewriteValueLOONG64_OpBitLen64(v)
   138  	case OpBitLen8:
   139  		return rewriteValueLOONG64_OpBitLen8(v)
   140  	case OpBitRev16:
   141  		return rewriteValueLOONG64_OpBitRev16(v)
   142  	case OpBitRev32:
   143  		v.Op = OpLOONG64BITREVW
   144  		return true
   145  	case OpBitRev64:
   146  		v.Op = OpLOONG64BITREVV
   147  		return true
   148  	case OpBitRev8:
   149  		v.Op = OpLOONG64BITREV4B
   150  		return true
   151  	case OpBswap16:
   152  		v.Op = OpLOONG64REVB2H
   153  		return true
   154  	case OpBswap32:
   155  		v.Op = OpLOONG64REVB2W
   156  		return true
   157  	case OpBswap64:
   158  		v.Op = OpLOONG64REVBV
   159  		return true
   160  	case OpClosureCall:
   161  		v.Op = OpLOONG64CALLclosure
   162  		return true
   163  	case OpCom16:
   164  		return rewriteValueLOONG64_OpCom16(v)
   165  	case OpCom32:
   166  		return rewriteValueLOONG64_OpCom32(v)
   167  	case OpCom64:
   168  		return rewriteValueLOONG64_OpCom64(v)
   169  	case OpCom8:
   170  		return rewriteValueLOONG64_OpCom8(v)
   171  	case OpCondSelect:
   172  		return rewriteValueLOONG64_OpCondSelect(v)
   173  	case OpConst16:
   174  		return rewriteValueLOONG64_OpConst16(v)
   175  	case OpConst32:
   176  		return rewriteValueLOONG64_OpConst32(v)
   177  	case OpConst32F:
   178  		return rewriteValueLOONG64_OpConst32F(v)
   179  	case OpConst64:
   180  		return rewriteValueLOONG64_OpConst64(v)
   181  	case OpConst64F:
   182  		return rewriteValueLOONG64_OpConst64F(v)
   183  	case OpConst8:
   184  		return rewriteValueLOONG64_OpConst8(v)
   185  	case OpConstBool:
   186  		return rewriteValueLOONG64_OpConstBool(v)
   187  	case OpConstNil:
   188  		return rewriteValueLOONG64_OpConstNil(v)
   189  	case OpCopysign:
   190  		v.Op = OpLOONG64FCOPYSGD
   191  		return true
   192  	case OpCtz16:
   193  		return rewriteValueLOONG64_OpCtz16(v)
   194  	case OpCtz16NonZero:
   195  		v.Op = OpCtz64
   196  		return true
   197  	case OpCtz32:
   198  		v.Op = OpLOONG64CTZW
   199  		return true
   200  	case OpCtz32NonZero:
   201  		v.Op = OpCtz64
   202  		return true
   203  	case OpCtz64:
   204  		v.Op = OpLOONG64CTZV
   205  		return true
   206  	case OpCtz64NonZero:
   207  		v.Op = OpCtz64
   208  		return true
   209  	case OpCtz8:
   210  		return rewriteValueLOONG64_OpCtz8(v)
   211  	case OpCtz8NonZero:
   212  		v.Op = OpCtz64
   213  		return true
   214  	case OpCvt32Fto32:
   215  		v.Op = OpLOONG64TRUNCFW
   216  		return true
   217  	case OpCvt32Fto64:
   218  		v.Op = OpLOONG64TRUNCFV
   219  		return true
   220  	case OpCvt32Fto64F:
   221  		v.Op = OpLOONG64MOVFD
   222  		return true
   223  	case OpCvt32to32F:
   224  		v.Op = OpLOONG64MOVWF
   225  		return true
   226  	case OpCvt32to64F:
   227  		v.Op = OpLOONG64MOVWD
   228  		return true
   229  	case OpCvt64Fto32:
   230  		v.Op = OpLOONG64TRUNCDW
   231  		return true
   232  	case OpCvt64Fto32F:
   233  		v.Op = OpLOONG64MOVDF
   234  		return true
   235  	case OpCvt64Fto64:
   236  		v.Op = OpLOONG64TRUNCDV
   237  		return true
   238  	case OpCvt64to32F:
   239  		v.Op = OpLOONG64MOVVF
   240  		return true
   241  	case OpCvt64to64F:
   242  		v.Op = OpLOONG64MOVVD
   243  		return true
   244  	case OpCvtBoolToUint8:
   245  		v.Op = OpCopy
   246  		return true
   247  	case OpDiv16:
   248  		return rewriteValueLOONG64_OpDiv16(v)
   249  	case OpDiv16u:
   250  		return rewriteValueLOONG64_OpDiv16u(v)
   251  	case OpDiv32:
   252  		return rewriteValueLOONG64_OpDiv32(v)
   253  	case OpDiv32F:
   254  		v.Op = OpLOONG64DIVF
   255  		return true
   256  	case OpDiv32u:
   257  		return rewriteValueLOONG64_OpDiv32u(v)
   258  	case OpDiv64:
   259  		return rewriteValueLOONG64_OpDiv64(v)
   260  	case OpDiv64F:
   261  		v.Op = OpLOONG64DIVD
   262  		return true
   263  	case OpDiv64u:
   264  		v.Op = OpLOONG64DIVVU
   265  		return true
   266  	case OpDiv8:
   267  		return rewriteValueLOONG64_OpDiv8(v)
   268  	case OpDiv8u:
   269  		return rewriteValueLOONG64_OpDiv8u(v)
   270  	case OpEq16:
   271  		return rewriteValueLOONG64_OpEq16(v)
   272  	case OpEq32:
   273  		return rewriteValueLOONG64_OpEq32(v)
   274  	case OpEq32F:
   275  		return rewriteValueLOONG64_OpEq32F(v)
   276  	case OpEq64:
   277  		return rewriteValueLOONG64_OpEq64(v)
   278  	case OpEq64F:
   279  		return rewriteValueLOONG64_OpEq64F(v)
   280  	case OpEq8:
   281  		return rewriteValueLOONG64_OpEq8(v)
   282  	case OpEqB:
   283  		return rewriteValueLOONG64_OpEqB(v)
   284  	case OpEqPtr:
   285  		return rewriteValueLOONG64_OpEqPtr(v)
   286  	case OpFMA:
   287  		v.Op = OpLOONG64FMADDD
   288  		return true
   289  	case OpGetCallerPC:
   290  		v.Op = OpLOONG64LoweredGetCallerPC
   291  		return true
   292  	case OpGetCallerSP:
   293  		v.Op = OpLOONG64LoweredGetCallerSP
   294  		return true
   295  	case OpGetClosurePtr:
   296  		v.Op = OpLOONG64LoweredGetClosurePtr
   297  		return true
   298  	case OpHmul32:
   299  		v.Op = OpLOONG64MULH
   300  		return true
   301  	case OpHmul32u:
   302  		v.Op = OpLOONG64MULHU
   303  		return true
   304  	case OpHmul64:
   305  		v.Op = OpLOONG64MULHV
   306  		return true
   307  	case OpHmul64u:
   308  		v.Op = OpLOONG64MULHVU
   309  		return true
   310  	case OpInterCall:
   311  		v.Op = OpLOONG64CALLinter
   312  		return true
   313  	case OpIsInBounds:
   314  		return rewriteValueLOONG64_OpIsInBounds(v)
   315  	case OpIsNonNil:
   316  		return rewriteValueLOONG64_OpIsNonNil(v)
   317  	case OpIsSliceInBounds:
   318  		return rewriteValueLOONG64_OpIsSliceInBounds(v)
   319  	case OpLOONG64ADDD:
   320  		return rewriteValueLOONG64_OpLOONG64ADDD(v)
   321  	case OpLOONG64ADDF:
   322  		return rewriteValueLOONG64_OpLOONG64ADDF(v)
   323  	case OpLOONG64ADDV:
   324  		return rewriteValueLOONG64_OpLOONG64ADDV(v)
   325  	case OpLOONG64ADDVconst:
   326  		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
   327  	case OpLOONG64ADDshiftLLV:
   328  		return rewriteValueLOONG64_OpLOONG64ADDshiftLLV(v)
   329  	case OpLOONG64AND:
   330  		return rewriteValueLOONG64_OpLOONG64AND(v)
   331  	case OpLOONG64ANDconst:
   332  		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
   333  	case OpLOONG64DIVV:
   334  		return rewriteValueLOONG64_OpLOONG64DIVV(v)
   335  	case OpLOONG64DIVVU:
   336  		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
   337  	case OpLOONG64LoweredPanicBoundsCR:
   338  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v)
   339  	case OpLOONG64LoweredPanicBoundsRC:
   340  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v)
   341  	case OpLOONG64LoweredPanicBoundsRR:
   342  		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v)
   343  	case OpLOONG64MASKEQZ:
   344  		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
   345  	case OpLOONG64MASKNEZ:
   346  		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
   347  	case OpLOONG64MOVBUload:
   348  		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
   349  	case OpLOONG64MOVBUloadidx:
   350  		return rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v)
   351  	case OpLOONG64MOVBUreg:
   352  		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
   353  	case OpLOONG64MOVBload:
   354  		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
   355  	case OpLOONG64MOVBloadidx:
   356  		return rewriteValueLOONG64_OpLOONG64MOVBloadidx(v)
   357  	case OpLOONG64MOVBreg:
   358  		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
   359  	case OpLOONG64MOVBstore:
   360  		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
   361  	case OpLOONG64MOVBstoreidx:
   362  		return rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v)
   363  	case OpLOONG64MOVDF:
   364  		return rewriteValueLOONG64_OpLOONG64MOVDF(v)
   365  	case OpLOONG64MOVDload:
   366  		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
   367  	case OpLOONG64MOVDloadidx:
   368  		return rewriteValueLOONG64_OpLOONG64MOVDloadidx(v)
   369  	case OpLOONG64MOVDstore:
   370  		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
   371  	case OpLOONG64MOVDstoreidx:
   372  		return rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v)
   373  	case OpLOONG64MOVFload:
   374  		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
   375  	case OpLOONG64MOVFloadidx:
   376  		return rewriteValueLOONG64_OpLOONG64MOVFloadidx(v)
   377  	case OpLOONG64MOVFstore:
   378  		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
   379  	case OpLOONG64MOVFstoreidx:
   380  		return rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v)
   381  	case OpLOONG64MOVHUload:
   382  		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
   383  	case OpLOONG64MOVHUloadidx:
   384  		return rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v)
   385  	case OpLOONG64MOVHUreg:
   386  		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
   387  	case OpLOONG64MOVHload:
   388  		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
   389  	case OpLOONG64MOVHloadidx:
   390  		return rewriteValueLOONG64_OpLOONG64MOVHloadidx(v)
   391  	case OpLOONG64MOVHreg:
   392  		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
   393  	case OpLOONG64MOVHstore:
   394  		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
   395  	case OpLOONG64MOVHstoreidx:
   396  		return rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v)
   397  	case OpLOONG64MOVVload:
   398  		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
   399  	case OpLOONG64MOVVloadidx:
   400  		return rewriteValueLOONG64_OpLOONG64MOVVloadidx(v)
   401  	case OpLOONG64MOVVnop:
   402  		return rewriteValueLOONG64_OpLOONG64MOVVnop(v)
   403  	case OpLOONG64MOVVreg:
   404  		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
   405  	case OpLOONG64MOVVstore:
   406  		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
   407  	case OpLOONG64MOVVstoreidx:
   408  		return rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v)
   409  	case OpLOONG64MOVWUload:
   410  		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
   411  	case OpLOONG64MOVWUloadidx:
   412  		return rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v)
   413  	case OpLOONG64MOVWUreg:
   414  		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
   415  	case OpLOONG64MOVWload:
   416  		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
   417  	case OpLOONG64MOVWloadidx:
   418  		return rewriteValueLOONG64_OpLOONG64MOVWloadidx(v)
   419  	case OpLOONG64MOVWreg:
   420  		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
   421  	case OpLOONG64MOVWstore:
   422  		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
   423  	case OpLOONG64MOVWstoreidx:
   424  		return rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v)
   425  	case OpLOONG64MULV:
   426  		return rewriteValueLOONG64_OpLOONG64MULV(v)
   427  	case OpLOONG64NEGV:
   428  		return rewriteValueLOONG64_OpLOONG64NEGV(v)
   429  	case OpLOONG64NOR:
   430  		return rewriteValueLOONG64_OpLOONG64NOR(v)
   431  	case OpLOONG64NORconst:
   432  		return rewriteValueLOONG64_OpLOONG64NORconst(v)
   433  	case OpLOONG64OR:
   434  		return rewriteValueLOONG64_OpLOONG64OR(v)
   435  	case OpLOONG64ORN:
   436  		return rewriteValueLOONG64_OpLOONG64ORN(v)
   437  	case OpLOONG64ORconst:
   438  		return rewriteValueLOONG64_OpLOONG64ORconst(v)
   439  	case OpLOONG64REMV:
   440  		return rewriteValueLOONG64_OpLOONG64REMV(v)
   441  	case OpLOONG64REMVU:
   442  		return rewriteValueLOONG64_OpLOONG64REMVU(v)
   443  	case OpLOONG64ROTR:
   444  		return rewriteValueLOONG64_OpLOONG64ROTR(v)
   445  	case OpLOONG64ROTRV:
   446  		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
   447  	case OpLOONG64SGT:
   448  		return rewriteValueLOONG64_OpLOONG64SGT(v)
   449  	case OpLOONG64SGTU:
   450  		return rewriteValueLOONG64_OpLOONG64SGTU(v)
   451  	case OpLOONG64SGTUconst:
   452  		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
   453  	case OpLOONG64SGTconst:
   454  		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
   455  	case OpLOONG64SLL:
   456  		return rewriteValueLOONG64_OpLOONG64SLL(v)
   457  	case OpLOONG64SLLV:
   458  		return rewriteValueLOONG64_OpLOONG64SLLV(v)
   459  	case OpLOONG64SLLVconst:
   460  		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
   461  	case OpLOONG64SLLconst:
   462  		return rewriteValueLOONG64_OpLOONG64SLLconst(v)
   463  	case OpLOONG64SRA:
   464  		return rewriteValueLOONG64_OpLOONG64SRA(v)
   465  	case OpLOONG64SRAV:
   466  		return rewriteValueLOONG64_OpLOONG64SRAV(v)
   467  	case OpLOONG64SRAVconst:
   468  		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
   469  	case OpLOONG64SRL:
   470  		return rewriteValueLOONG64_OpLOONG64SRL(v)
   471  	case OpLOONG64SRLV:
   472  		return rewriteValueLOONG64_OpLOONG64SRLV(v)
   473  	case OpLOONG64SRLVconst:
   474  		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
   475  	case OpLOONG64SUBD:
   476  		return rewriteValueLOONG64_OpLOONG64SUBD(v)
   477  	case OpLOONG64SUBF:
   478  		return rewriteValueLOONG64_OpLOONG64SUBF(v)
   479  	case OpLOONG64SUBV:
   480  		return rewriteValueLOONG64_OpLOONG64SUBV(v)
   481  	case OpLOONG64SUBVconst:
   482  		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
   483  	case OpLOONG64XOR:
   484  		return rewriteValueLOONG64_OpLOONG64XOR(v)
   485  	case OpLOONG64XORconst:
   486  		return rewriteValueLOONG64_OpLOONG64XORconst(v)
   487  	case OpLeq16:
   488  		return rewriteValueLOONG64_OpLeq16(v)
   489  	case OpLeq16U:
   490  		return rewriteValueLOONG64_OpLeq16U(v)
   491  	case OpLeq32:
   492  		return rewriteValueLOONG64_OpLeq32(v)
   493  	case OpLeq32F:
   494  		return rewriteValueLOONG64_OpLeq32F(v)
   495  	case OpLeq32U:
   496  		return rewriteValueLOONG64_OpLeq32U(v)
   497  	case OpLeq64:
   498  		return rewriteValueLOONG64_OpLeq64(v)
   499  	case OpLeq64F:
   500  		return rewriteValueLOONG64_OpLeq64F(v)
   501  	case OpLeq64U:
   502  		return rewriteValueLOONG64_OpLeq64U(v)
   503  	case OpLeq8:
   504  		return rewriteValueLOONG64_OpLeq8(v)
   505  	case OpLeq8U:
   506  		return rewriteValueLOONG64_OpLeq8U(v)
   507  	case OpLess16:
   508  		return rewriteValueLOONG64_OpLess16(v)
   509  	case OpLess16U:
   510  		return rewriteValueLOONG64_OpLess16U(v)
   511  	case OpLess32:
   512  		return rewriteValueLOONG64_OpLess32(v)
   513  	case OpLess32F:
   514  		return rewriteValueLOONG64_OpLess32F(v)
   515  	case OpLess32U:
   516  		return rewriteValueLOONG64_OpLess32U(v)
   517  	case OpLess64:
   518  		return rewriteValueLOONG64_OpLess64(v)
   519  	case OpLess64F:
   520  		return rewriteValueLOONG64_OpLess64F(v)
   521  	case OpLess64U:
   522  		return rewriteValueLOONG64_OpLess64U(v)
   523  	case OpLess8:
   524  		return rewriteValueLOONG64_OpLess8(v)
   525  	case OpLess8U:
   526  		return rewriteValueLOONG64_OpLess8U(v)
   527  	case OpLoad:
   528  		return rewriteValueLOONG64_OpLoad(v)
   529  	case OpLocalAddr:
   530  		return rewriteValueLOONG64_OpLocalAddr(v)
   531  	case OpLsh16x16:
   532  		return rewriteValueLOONG64_OpLsh16x16(v)
   533  	case OpLsh16x32:
   534  		return rewriteValueLOONG64_OpLsh16x32(v)
   535  	case OpLsh16x64:
   536  		return rewriteValueLOONG64_OpLsh16x64(v)
   537  	case OpLsh16x8:
   538  		return rewriteValueLOONG64_OpLsh16x8(v)
   539  	case OpLsh32x16:
   540  		return rewriteValueLOONG64_OpLsh32x16(v)
   541  	case OpLsh32x32:
   542  		return rewriteValueLOONG64_OpLsh32x32(v)
   543  	case OpLsh32x64:
   544  		return rewriteValueLOONG64_OpLsh32x64(v)
   545  	case OpLsh32x8:
   546  		return rewriteValueLOONG64_OpLsh32x8(v)
   547  	case OpLsh64x16:
   548  		return rewriteValueLOONG64_OpLsh64x16(v)
   549  	case OpLsh64x32:
   550  		return rewriteValueLOONG64_OpLsh64x32(v)
   551  	case OpLsh64x64:
   552  		return rewriteValueLOONG64_OpLsh64x64(v)
   553  	case OpLsh64x8:
   554  		return rewriteValueLOONG64_OpLsh64x8(v)
   555  	case OpLsh8x16:
   556  		return rewriteValueLOONG64_OpLsh8x16(v)
   557  	case OpLsh8x32:
   558  		return rewriteValueLOONG64_OpLsh8x32(v)
   559  	case OpLsh8x64:
   560  		return rewriteValueLOONG64_OpLsh8x64(v)
   561  	case OpLsh8x8:
   562  		return rewriteValueLOONG64_OpLsh8x8(v)
   563  	case OpMax32F:
   564  		v.Op = OpLOONG64FMAXF
   565  		return true
   566  	case OpMax64F:
   567  		v.Op = OpLOONG64FMAXD
   568  		return true
   569  	case OpMin32F:
   570  		v.Op = OpLOONG64FMINF
   571  		return true
   572  	case OpMin64F:
   573  		v.Op = OpLOONG64FMIND
   574  		return true
   575  	case OpMod16:
   576  		return rewriteValueLOONG64_OpMod16(v)
   577  	case OpMod16u:
   578  		return rewriteValueLOONG64_OpMod16u(v)
   579  	case OpMod32:
   580  		return rewriteValueLOONG64_OpMod32(v)
   581  	case OpMod32u:
   582  		return rewriteValueLOONG64_OpMod32u(v)
   583  	case OpMod64:
   584  		return rewriteValueLOONG64_OpMod64(v)
   585  	case OpMod64u:
   586  		v.Op = OpLOONG64REMVU
   587  		return true
   588  	case OpMod8:
   589  		return rewriteValueLOONG64_OpMod8(v)
   590  	case OpMod8u:
   591  		return rewriteValueLOONG64_OpMod8u(v)
   592  	case OpMove:
   593  		return rewriteValueLOONG64_OpMove(v)
   594  	case OpMul16:
   595  		v.Op = OpLOONG64MULV
   596  		return true
   597  	case OpMul32:
   598  		v.Op = OpLOONG64MULV
   599  		return true
   600  	case OpMul32F:
   601  		v.Op = OpLOONG64MULF
   602  		return true
   603  	case OpMul64:
   604  		v.Op = OpLOONG64MULV
   605  		return true
   606  	case OpMul64F:
   607  		v.Op = OpLOONG64MULD
   608  		return true
   609  	case OpMul8:
   610  		v.Op = OpLOONG64MULV
   611  		return true
   612  	case OpNeg16:
   613  		v.Op = OpLOONG64NEGV
   614  		return true
   615  	case OpNeg32:
   616  		v.Op = OpLOONG64NEGV
   617  		return true
   618  	case OpNeg32F:
   619  		v.Op = OpLOONG64NEGF
   620  		return true
   621  	case OpNeg64:
   622  		v.Op = OpLOONG64NEGV
   623  		return true
   624  	case OpNeg64F:
   625  		v.Op = OpLOONG64NEGD
   626  		return true
   627  	case OpNeg8:
   628  		v.Op = OpLOONG64NEGV
   629  		return true
   630  	case OpNeq16:
   631  		return rewriteValueLOONG64_OpNeq16(v)
   632  	case OpNeq32:
   633  		return rewriteValueLOONG64_OpNeq32(v)
   634  	case OpNeq32F:
   635  		return rewriteValueLOONG64_OpNeq32F(v)
   636  	case OpNeq64:
   637  		return rewriteValueLOONG64_OpNeq64(v)
   638  	case OpNeq64F:
   639  		return rewriteValueLOONG64_OpNeq64F(v)
   640  	case OpNeq8:
   641  		return rewriteValueLOONG64_OpNeq8(v)
   642  	case OpNeqB:
   643  		v.Op = OpLOONG64XOR
   644  		return true
   645  	case OpNeqPtr:
   646  		return rewriteValueLOONG64_OpNeqPtr(v)
   647  	case OpNilCheck:
   648  		v.Op = OpLOONG64LoweredNilCheck
   649  		return true
   650  	case OpNot:
   651  		return rewriteValueLOONG64_OpNot(v)
   652  	case OpOffPtr:
   653  		return rewriteValueLOONG64_OpOffPtr(v)
   654  	case OpOr16:
   655  		v.Op = OpLOONG64OR
   656  		return true
   657  	case OpOr32:
   658  		v.Op = OpLOONG64OR
   659  		return true
   660  	case OpOr64:
   661  		v.Op = OpLOONG64OR
   662  		return true
   663  	case OpOr8:
   664  		v.Op = OpLOONG64OR
   665  		return true
   666  	case OpOrB:
   667  		v.Op = OpLOONG64OR
   668  		return true
   669  	case OpPanicBounds:
   670  		v.Op = OpLOONG64LoweredPanicBoundsRR
   671  		return true
   672  	case OpPopCount16:
   673  		return rewriteValueLOONG64_OpPopCount16(v)
   674  	case OpPopCount32:
   675  		return rewriteValueLOONG64_OpPopCount32(v)
   676  	case OpPopCount64:
   677  		return rewriteValueLOONG64_OpPopCount64(v)
   678  	case OpPrefetchCache:
   679  		return rewriteValueLOONG64_OpPrefetchCache(v)
   680  	case OpPrefetchCacheStreamed:
   681  		return rewriteValueLOONG64_OpPrefetchCacheStreamed(v)
   682  	case OpPubBarrier:
   683  		v.Op = OpLOONG64LoweredPubBarrier
   684  		return true
   685  	case OpRotateLeft16:
   686  		return rewriteValueLOONG64_OpRotateLeft16(v)
   687  	case OpRotateLeft32:
   688  		return rewriteValueLOONG64_OpRotateLeft32(v)
   689  	case OpRotateLeft64:
   690  		return rewriteValueLOONG64_OpRotateLeft64(v)
   691  	case OpRotateLeft8:
   692  		return rewriteValueLOONG64_OpRotateLeft8(v)
   693  	case OpRound32F:
   694  		v.Op = OpLOONG64LoweredRound32F
   695  		return true
   696  	case OpRound64F:
   697  		v.Op = OpLOONG64LoweredRound64F
   698  		return true
   699  	case OpRsh16Ux16:
   700  		return rewriteValueLOONG64_OpRsh16Ux16(v)
   701  	case OpRsh16Ux32:
   702  		return rewriteValueLOONG64_OpRsh16Ux32(v)
   703  	case OpRsh16Ux64:
   704  		return rewriteValueLOONG64_OpRsh16Ux64(v)
   705  	case OpRsh16Ux8:
   706  		return rewriteValueLOONG64_OpRsh16Ux8(v)
   707  	case OpRsh16x16:
   708  		return rewriteValueLOONG64_OpRsh16x16(v)
   709  	case OpRsh16x32:
   710  		return rewriteValueLOONG64_OpRsh16x32(v)
   711  	case OpRsh16x64:
   712  		return rewriteValueLOONG64_OpRsh16x64(v)
   713  	case OpRsh16x8:
   714  		return rewriteValueLOONG64_OpRsh16x8(v)
   715  	case OpRsh32Ux16:
   716  		return rewriteValueLOONG64_OpRsh32Ux16(v)
   717  	case OpRsh32Ux32:
   718  		return rewriteValueLOONG64_OpRsh32Ux32(v)
   719  	case OpRsh32Ux64:
   720  		return rewriteValueLOONG64_OpRsh32Ux64(v)
   721  	case OpRsh32Ux8:
   722  		return rewriteValueLOONG64_OpRsh32Ux8(v)
   723  	case OpRsh32x16:
   724  		return rewriteValueLOONG64_OpRsh32x16(v)
   725  	case OpRsh32x32:
   726  		return rewriteValueLOONG64_OpRsh32x32(v)
   727  	case OpRsh32x64:
   728  		return rewriteValueLOONG64_OpRsh32x64(v)
   729  	case OpRsh32x8:
   730  		return rewriteValueLOONG64_OpRsh32x8(v)
   731  	case OpRsh64Ux16:
   732  		return rewriteValueLOONG64_OpRsh64Ux16(v)
   733  	case OpRsh64Ux32:
   734  		return rewriteValueLOONG64_OpRsh64Ux32(v)
   735  	case OpRsh64Ux64:
   736  		return rewriteValueLOONG64_OpRsh64Ux64(v)
   737  	case OpRsh64Ux8:
   738  		return rewriteValueLOONG64_OpRsh64Ux8(v)
   739  	case OpRsh64x16:
   740  		return rewriteValueLOONG64_OpRsh64x16(v)
   741  	case OpRsh64x32:
   742  		return rewriteValueLOONG64_OpRsh64x32(v)
   743  	case OpRsh64x64:
   744  		return rewriteValueLOONG64_OpRsh64x64(v)
   745  	case OpRsh64x8:
   746  		return rewriteValueLOONG64_OpRsh64x8(v)
   747  	case OpRsh8Ux16:
   748  		return rewriteValueLOONG64_OpRsh8Ux16(v)
   749  	case OpRsh8Ux32:
   750  		return rewriteValueLOONG64_OpRsh8Ux32(v)
   751  	case OpRsh8Ux64:
   752  		return rewriteValueLOONG64_OpRsh8Ux64(v)
   753  	case OpRsh8Ux8:
   754  		return rewriteValueLOONG64_OpRsh8Ux8(v)
   755  	case OpRsh8x16:
   756  		return rewriteValueLOONG64_OpRsh8x16(v)
   757  	case OpRsh8x32:
   758  		return rewriteValueLOONG64_OpRsh8x32(v)
   759  	case OpRsh8x64:
   760  		return rewriteValueLOONG64_OpRsh8x64(v)
   761  	case OpRsh8x8:
   762  		return rewriteValueLOONG64_OpRsh8x8(v)
   763  	case OpSelect0:
   764  		return rewriteValueLOONG64_OpSelect0(v)
   765  	case OpSelect1:
   766  		return rewriteValueLOONG64_OpSelect1(v)
   767  	case OpSelectN:
   768  		return rewriteValueLOONG64_OpSelectN(v)
   769  	case OpSignExt16to32:
   770  		v.Op = OpLOONG64MOVHreg
   771  		return true
   772  	case OpSignExt16to64:
   773  		v.Op = OpLOONG64MOVHreg
   774  		return true
   775  	case OpSignExt32to64:
   776  		v.Op = OpLOONG64MOVWreg
   777  		return true
   778  	case OpSignExt8to16:
   779  		v.Op = OpLOONG64MOVBreg
   780  		return true
   781  	case OpSignExt8to32:
   782  		v.Op = OpLOONG64MOVBreg
   783  		return true
   784  	case OpSignExt8to64:
   785  		v.Op = OpLOONG64MOVBreg
   786  		return true
   787  	case OpSlicemask:
   788  		return rewriteValueLOONG64_OpSlicemask(v)
   789  	case OpSqrt:
   790  		v.Op = OpLOONG64SQRTD
   791  		return true
   792  	case OpSqrt32:
   793  		v.Op = OpLOONG64SQRTF
   794  		return true
   795  	case OpStaticCall:
   796  		v.Op = OpLOONG64CALLstatic
   797  		return true
   798  	case OpStore:
   799  		return rewriteValueLOONG64_OpStore(v)
   800  	case OpSub16:
   801  		v.Op = OpLOONG64SUBV
   802  		return true
   803  	case OpSub32:
   804  		v.Op = OpLOONG64SUBV
   805  		return true
   806  	case OpSub32F:
   807  		v.Op = OpLOONG64SUBF
   808  		return true
   809  	case OpSub64:
   810  		v.Op = OpLOONG64SUBV
   811  		return true
   812  	case OpSub64F:
   813  		v.Op = OpLOONG64SUBD
   814  		return true
   815  	case OpSub8:
   816  		v.Op = OpLOONG64SUBV
   817  		return true
   818  	case OpSubPtr:
   819  		v.Op = OpLOONG64SUBV
   820  		return true
   821  	case OpTailCall:
   822  		v.Op = OpLOONG64CALLtail
   823  		return true
   824  	case OpTrunc16to8:
   825  		v.Op = OpCopy
   826  		return true
   827  	case OpTrunc32to16:
   828  		v.Op = OpCopy
   829  		return true
   830  	case OpTrunc32to8:
   831  		v.Op = OpCopy
   832  		return true
   833  	case OpTrunc64to16:
   834  		v.Op = OpCopy
   835  		return true
   836  	case OpTrunc64to32:
   837  		v.Op = OpCopy
   838  		return true
   839  	case OpTrunc64to8:
   840  		v.Op = OpCopy
   841  		return true
   842  	case OpWB:
   843  		v.Op = OpLOONG64LoweredWB
   844  		return true
   845  	case OpXor16:
   846  		v.Op = OpLOONG64XOR
   847  		return true
   848  	case OpXor32:
   849  		v.Op = OpLOONG64XOR
   850  		return true
   851  	case OpXor64:
   852  		v.Op = OpLOONG64XOR
   853  		return true
   854  	case OpXor8:
   855  		v.Op = OpLOONG64XOR
   856  		return true
   857  	case OpZero:
   858  		return rewriteValueLOONG64_OpZero(v)
   859  	case OpZeroExt16to32:
   860  		v.Op = OpLOONG64MOVHUreg
   861  		return true
   862  	case OpZeroExt16to64:
   863  		v.Op = OpLOONG64MOVHUreg
   864  		return true
   865  	case OpZeroExt32to64:
   866  		v.Op = OpLOONG64MOVWUreg
   867  		return true
   868  	case OpZeroExt8to16:
   869  		v.Op = OpLOONG64MOVBUreg
   870  		return true
   871  	case OpZeroExt8to32:
   872  		v.Op = OpLOONG64MOVBUreg
   873  		return true
   874  	case OpZeroExt8to64:
   875  		v.Op = OpLOONG64MOVBUreg
   876  		return true
   877  	}
   878  	return false
   879  }
   880  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   881  	v_0 := v.Args[0]
   882  	// match: (Addr {sym} base)
   883  	// result: (MOVVaddr {sym} base)
   884  	for {
   885  		sym := auxToSym(v.Aux)
   886  		base := v_0
   887  		v.reset(OpLOONG64MOVVaddr)
   888  		v.Aux = symToAux(sym)
   889  		v.AddArg(base)
   890  		return true
   891  	}
   892  }
// rewriteValueLOONG64_OpAtomicAnd8 lowers a byte-sized atomic AND to a
// 32-bit atomic AND on the containing aligned word: the address is masked
// with ^3 to find the word, and the byte operand is widened to a 32-bit
// mask (all-ones outside the target byte) shifted into the byte's position.
func rewriteValueLOONG64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (LoweredAtomicAnd32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (NORconst [0] <typ.UInt32> (SLLV <typ.UInt32> (XORconst <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicAnd32)
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpLOONG64NORconst, typ.UInt32)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpLOONG64XORconst, typ.UInt32)
		v4.AuxInt = int64ToAuxInt(0xff)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v5.AddArg(val)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v7 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v3.AddArg2(v4, v6)
		v2.AddArg(v3)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to the
// LOONG64 lowered op, sign-extending the expected old value to 64 bits so
// it compares correctly against the sign-extended load done by the CAS loop.
func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant is the variant-form
// counterpart of OpAtomicCompareAndSwap32: identical lowering, targeting
// LoweredAtomicCas32Variant, with the old value sign-extended to 64 bits.
func rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32Variant ptr old new mem)
	// result: (LoweredAtomicCas32Variant ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32Variant)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicOr8 lowers a byte-sized atomic OR to a 32-bit
// atomic OR on the containing aligned word: the address is masked with ^3,
// and the zero-extended byte value is shifted into its byte position within
// the word (zero bits elsewhere leave the other bytes unchanged under OR).
func rewriteValueLOONG64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAvg64u lowers the unsigned 64-bit average using the
// overflow-free identity avg(x, y) = (x - y)>>1 + y (valid when x >= y,
// which is the contract of the generic Avg64u op).
func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen16 reduces BitLen16 to BitLen64 by
// zero-extending the operand; high zero bits do not change the bit length.
func rewriteValueLOONG64_OpBitLen16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen16 x)
	// result: (BitLen64 (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen32 lowers BitLen32 to 32 - CLZW(x), expressed
// as -(CLZW(x) - 32) via NEGV(SUBVconst[32]) using LOONG64's count-leading-
// zeros-word instruction.
func rewriteValueLOONG64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen32 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [32] (CLZW <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZW, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen64 lowers BitLen64 to 64 - CLZV(x), expressed
// as -(CLZV(x) - 64) via NEGV(SUBVconst[64]) using the 64-bit
// count-leading-zeros instruction.
func rewriteValueLOONG64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen64 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [64] (CLZV <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZV, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen8 reduces BitLen8 to BitLen64 by
// zero-extending the operand; high zero bits do not change the bit length.
func rewriteValueLOONG64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitRev16 lowers a 16-bit bit reversal by reversing
// bits within each byte (BITREV4B) and then swapping the two bytes (REVB2H).
func rewriteValueLOONG64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitRev16 <t> x)
	// result: (REVB2H (BITREV4B <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64REVB2H)
		v0 := b.NewValue0(v.Pos, OpLOONG64BITREV4B, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpCom16 lowers bitwise complement as NOR with zero,
// since LOONG64 has NOR but no dedicated NOT instruction.
func rewriteValueLOONG64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom32 lowers bitwise complement as NOR with zero,
// since LOONG64 has NOR but no dedicated NOT instruction.
func rewriteValueLOONG64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom64 lowers bitwise complement as NOR with zero,
// since LOONG64 has NOR but no dedicated NOT instruction.
func rewriteValueLOONG64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom8 lowers bitwise complement as NOR with zero,
// since LOONG64 has NOR but no dedicated NOT instruction.
func rewriteValueLOONG64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCondSelect lowers a branchless select using the
// MASKEQZ/MASKNEZ pair: exactly one of the two masked operands is nonzero
// depending on cond, so ORing them yields x when cond != 0 and y otherwise.
func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect <t> x y cond)
	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
	for {
		t := v.Type
		x := v_0
		y := v_1
		cond := v_2
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
		v0.AddArg2(x, cond)
		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
		v1.AddArg2(y, cond)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpConst16 lowers a 16-bit constant to MOVVconst,
// sign-extending the aux value to int64.
func rewriteValueLOONG64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst32 lowers a 32-bit constant to MOVVconst,
// sign-extending the aux value to int64.
func rewriteValueLOONG64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst32F lowers a float32 constant to MOVFconst,
// widening the aux value to float64 (the common AuxInt encoding for floats).
func rewriteValueLOONG64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpLOONG64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst64 lowers a 64-bit constant to MOVVconst.
func rewriteValueLOONG64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst64F lowers a float64 constant to MOVDconst.
func rewriteValueLOONG64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpLOONG64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst8 lowers an 8-bit constant to MOVVconst,
// sign-extending the aux value to int64.
func rewriteValueLOONG64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConstBool lowers a boolean constant to MOVVconst,
// encoding true/false as 1/0 via b2i.
func rewriteValueLOONG64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueLOONG64_OpConstNil lowers the nil-pointer constant to
// MOVVconst [0].
func rewriteValueLOONG64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueLOONG64_OpCtz16 lowers Ctz16 via the 64-bit CTZV instruction.
// ORing in 1<<16 plants a sentinel bit just above the operand's width so a
// zero input yields 16 rather than CTZV's result for all-zero (64).
func rewriteValueLOONG64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<16])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpCtz8 lowers Ctz8 via the 64-bit CTZV instruction.
// ORing in 1<<8 plants a sentinel bit just above the operand's width so a
// zero input yields 8 rather than CTZV's result for all-zero (64).
func rewriteValueLOONG64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<8])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpDiv16 lowers signed 16-bit division to the 64-bit
// DIVV after sign-extending both operands.
func rewriteValueLOONG64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv16u lowers unsigned 16-bit division to the 64-bit
// DIVVU after zero-extending both operands.
func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv32 lowers signed 32-bit division to the 64-bit
// DIVV after sign-extending both operands.
func rewriteValueLOONG64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv32u lowers unsigned 32-bit division to the 64-bit
// DIVVU after zero-extending both operands.
func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv64 lowers signed 64-bit division directly to
// DIVV; operands are already full-width, so no extension is needed.
func rewriteValueLOONG64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y)
	// result: (DIVV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueLOONG64_OpDiv8 lowers signed 8-bit division to the 64-bit
// DIVV after sign-extending both operands.
func rewriteValueLOONG64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv8u lowers unsigned 8-bit division to the 64-bit
// DIVVU after zero-extending both operands.
func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq16 lowers 16-bit equality: XOR of the
// zero-extended operands is zero iff they are equal, and (SGTU 1 z)
// produces 1 exactly when z == 0.
func rewriteValueLOONG64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq32 lowers 32-bit equality: XOR of the
// zero-extended operands is zero iff they are equal, and (SGTU 1 z)
// produces 1 exactly when z == 0.
func rewriteValueLOONG64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq32F lowers float32 equality to the FP compare
// instruction CMPEQF followed by reading the FP condition flag.
func rewriteValueLOONG64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpEq64 lowers 64-bit equality: XOR of the operands is
// zero iff they are equal, and (SGTU 1 z) produces 1 exactly when z == 0.
func rewriteValueLOONG64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq64F lowers float64 equality to the FP compare
// instruction CMPEQD followed by reading the FP condition flag.
func rewriteValueLOONG64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpEq8 lowers 8-bit equality: XOR of the zero-extended
// operands is zero iff they are equal, and (SGTU 1 z) produces 1 exactly
// when z == 0.
func rewriteValueLOONG64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEqB lowers boolean equality: for 0/1 operands,
// x == y is equivalent to 1 XOR (x XOR y).
func rewriteValueLOONG64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEqPtr lowers pointer equality exactly like Eq64:
// XOR is zero iff equal, and (SGTU 1 z) produces 1 exactly when z == 0.
func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpIsInBounds lowers the bounds check idx < len to the
// unsigned compare (SGTU len idx); unsigned compare also rejects negative
// idx values reinterpreted as huge unsigned numbers.
func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueLOONG64_OpIsNonNil lowers the nil check to the unsigned
// compare ptr > 0, which is 1 for any non-nil pointer.
func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueLOONG64_OpIsSliceInBounds lowers the slice bounds check
// idx <= len as the negation of idx > len: 1 XOR (SGTU idx len).
func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLOONG64ADDD fuses float64 add with multiply into
// FMA forms when the function permits FMA (useFMA): MULD+ADDD -> FMADDD and
// ADDD of a negated MULD -> FNMSUBD. The inner _i0 loops try both operand
// orders since ADDD is commutative. Returns false when no rule applies.
func rewriteValueLOONG64_OpLOONG64ADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGD {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDF fuses float32 add with multiply into
// FMA forms when the function permits FMA (useFMA): MULF+ADDF -> FMADDF and
// ADDF of a negated MULF -> FNMSUBF. The inner _i0 loops try both operand
// orders since ADDF is commutative. Returns false when no rule applies.
func rewriteValueLOONG64_OpLOONG64ADDF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGF {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDV applies peephole rules to the 64-bit
// integer add, in priority order:
//   - byte-swap patterns (shift/mask combinations that implement a 16-bit
//     or 4x16-bit byte reversal) -> REVB2H / REVB4H;
//   - add of a small constant -> ADDVconst (skipped for pointer-typed
//     constants so address arithmetic keeps its shape);
//   - add of a single-use small left shift -> fused ADDshiftLLV;
//   - add of a negation -> SUBV.
//
// Each outer `for { ... break }` block is one rule; the inner _i0/_i1/_i2
// loops try both operand orders of the commutative ops. Returns false when
// no rule applies.
func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDV <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (ADDV (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpLOONG64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x0 x1:(SLLVconst [c] y))
	// cond: x1.Uses == 1 && c > 0 && c <= 4
	// result: (ADDshiftLLV x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpLOONG64SLLVconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(x1.Uses == 1 && c > 0 && c <= 4) {
				continue
			}
			v.reset(OpLOONG64ADDshiftLLV)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDVconst applies the generated LOONG64.rules
// rewrites for ADDVconst, tried in order: fold the immediate into a MOVVaddr
// offset, elide an add of zero, constant-fold against MOVVconst, merge with an
// inner ADDVconst/SUBVconst, and select ADDV16const when the 32-bit immediate
// has its low 16 bits clear. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		// Only combine when the summed offset still fits in 32 bits.
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] x)
	// cond: is32Bit(c) && c&0xffff == 0 && c != 0
	// result: (ADDV16const [c] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(is32Bit(c) && c&0xffff == 0 && c != 0) {
			break
		}
		v.reset(OpLOONG64ADDV16const)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDshiftLLV applies the generated rewrite for
// ADDshiftLLV: when the shifted operand is a constant and the pre-shifted
// value fits in a 12-bit immediate, fold the whole shift-and-add into a single
// ADDVconst. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ADDshiftLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDshiftLLV x (MOVVconst [c]) [d])
	// cond: is12Bit(c<<d)
	// result: (ADDVconst x [c<<d])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is12Bit(c << d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c << d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64AND applies the generated rewrites for AND:
// fold a 32-bit-immediate operand into ANDconst, simplify x&x to x, and turn
// AND with a bitwise complement (NORconst [0]) into the ANDN instruction.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		// AND is commutative: the two-iteration loop tries both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (NORconst [0] y))
	// result: (ANDN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			// NORconst [0] y is ^y, so x & ^y lowers to ANDN.
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ANDN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ANDconst applies the generated rewrites for
// ANDconst: x&0 is 0, x&-1 is x, constant-fold against MOVVconst, and collapse
// nested ANDconst by intersecting the masks. It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVV constant-folds a signed 64-bit division of
// two MOVVconst operands, guarded against division by zero. It reports whether
// v was rewritten.
func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVVU applies the generated rewrites for
// unsigned 64-bit division: division by one is the identity, division by a
// power of two becomes a logical right shift, and two constant operands fold
// to a constant (guarded against a zero divisor). It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVVU x (MOVVconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (DIVVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		// Division is performed in the unsigned domain, then stored back as int64.
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR rewrites a bounds-check
// panic whose register operand turns out to be a constant into the
// fully-constant LoweredPanicBoundsCC form, combining both constants in the
// aux. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpLOONG64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC rewrites a bounds-check
// panic whose register operand turns out to be a constant into the
// fully-constant LoweredPanicBoundsCC form; here the discovered constant is
// the x operand and the existing aux constant is y. It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpLOONG64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR rewrites a register-register
// bounds-check panic into the RC or CR form when either operand is found to be
// a MOVVconst, moving the constant into the aux. It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpLOONG64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKEQZ applies the generated rewrites for
// MASKEQZ (select x when cond != 0, else 0): a zero first operand or a zero
// constant condition yields constant zero, and a known-nonzero constant
// condition yields x unchanged. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MASKEQZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c == 0
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c == 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c != 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c != 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKNEZ applies the single generated rewrite
// for MASKNEZ: masking a constant zero yields constant zero regardless of the
// condition. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MASKNEZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUload applies the generated rewrites for an
// unsigned byte load: forward a just-stored value, fold ADDVconst/MOVVaddr
// offsets into the load's aux offset, convert base+index addressing into the
// indexed load form, and constant-fold loads from read-only symbols. It
// reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
	// result: (MOVBUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		// Store-to-load forwarding: same pointer, offset, and symbol.
		if v_1.Op != OpLOONG64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVBUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBUloadidx)
		// The shift is rematerialized as an explicit SLLVconst feeding the index.
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUloadidx converts an indexed unsigned byte
// load back to the offset form when either the index or the base is a 32-bit
// constant. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUreg applies the generated rewrites for the
// zero-extend-byte op: merge with a right shift into BSTRPICKV, drop the
// extension when the input is already 0/1-valued (SGT/SGTU and their
// XOR-with-1 negations) or already byte-loaded/extended, constant-fold, fold
// into an ANDconst mask, and elide the extension when a shift or mask already
// bounds the value to a byte. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg (SRLVconst [rc] x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + (7+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		// BSTRPICKV aux packs lsb in the low 6 bits and msb in bits 6-11.
		v.AuxInt = int64ToAuxInt(rc + (7+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(SGT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SGTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGT _ _)))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGT {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGTU _ _)))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGTU {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLVconst [lc] x))
	// cond: lc >= 8
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 8) {
			break
		}
		// Shifting left by 8+ clears the low byte, so the extension is zero.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&0xff] x)
	for {
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & 0xff)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(SRLconst [c] y))
	// cond: c >= 24
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 24) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// A mask that already fits in a byte makes the zero-extension a no-op.
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBload applies the generated rewrites for a
// signed byte load: forward a just-stored value, fold ADDVconst/MOVVaddr
// offsets into the load's aux offset, convert base+index addressing into the
// indexed load form, and constant-fold loads from read-only symbols (with sign
// extension). It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
	// result: (MOVBreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		// Store-to-load forwarding: same pointer, offset, and symbol.
		if v_1.Op != OpLOONG64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVBreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBloadidx)
		// The shift is rematerialized as an explicit SLLVconst feeding the index.
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		// int8 conversion performs the sign extension the load would do.
		v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBloadidx converts an indexed signed byte
// load back to the offset form when either the index or the base is a 32-bit
// constant. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBreg applies the generated rewrites for the
// sign-extend-byte op: drop the extension when the input is already a signed
// byte load (plain or indexed) or a prior MOVBreg, constant-fold via int8
// truncation, and elide the extension when an ANDconst mask already bounds the
// value to a non-negative byte. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// Mask fits in 0..0x7f, so the result's sign bit is clear and the
		// sign extension is a no-op.
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstore rewrites MOVBstore (byte store)
// values. It folds ADDVconst/MOVVaddr address arithmetic into the store's
// offset/symbol, drops sign/zero extensions of the stored value narrower
// than or equal to 32 bits (a byte store only writes the low 8 bits, so
// the extension is dead), and converts reg+reg / reg+shifted-reg
// addressing into the MOVBstoreidx indexed form.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstoreidx rewrites indexed byte stores:
// when either the index or the base is a constant that fits in 32 bits, the
// constant is folded into a plain MOVBstore offset (ADDV is commutative, so
// both argument orders are matched).
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDF rewrites MOVDF (double-to-float
// conversion) values: when the double-precision operand is itself ABSD or
// SQRTD applied to a widened float (MOVFD x), the whole round trip is
// replaced by the single-precision ABSF/SQRTF on x directly.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVDF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDF (ABSD (MOVFD x)))
	// result: (ABSF x)
	for {
		if v_0.Op != OpLOONG64ABSD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpLOONG64ABSF)
		v.AddArg(x)
		return true
	}
	// match: (MOVDF (SQRTD (MOVFD x)))
	// result: (SQRTF x)
	for {
		if v_0.Op != OpLOONG64SQRTD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpLOONG64SQRTF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDload rewrites MOVDload (float64 load)
// values: forwards a just-stored GP value through MOVVgpfp instead of going
// via memory, folds ADDVconst/MOVVaddr address arithmetic into the
// offset/symbol, and converts reg+reg / reg+shifted-reg addressing into the
// MOVDloadidx indexed form.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDloadidx rewrites indexed float64 loads:
// when either operand is a 32-bit-representable constant, it is folded into
// a plain MOVDload offset (both argument orders are matched since addition
// is commutative).
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstore rewrites MOVDstore (float64 store)
// values: stores a GP-to-FP moved value (MOVVgpfp) directly from the integer
// register via MOVVstore, folds ADDVconst/MOVVaddr address arithmetic into
// the offset/symbol, and converts reg+reg / reg+shifted-reg addressing into
// the MOVDstoreidx indexed form.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
	// result: (MOVVstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstoreidx rewrites indexed float64 stores:
// when either address operand is a 32-bit-representable constant, it is
// folded into a plain MOVDstore offset (both argument orders are matched).
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFload rewrites MOVFload (float32 load)
// values: forwards a just-stored GP word through MOVWgpfp instead of going
// via memory, folds ADDVconst/MOVVaddr address arithmetic into the
// offset/symbol, and converts reg+reg / reg+shifted-reg addressing into the
// MOVFloadidx indexed form.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (MOVWgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVFload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFloadidx rewrites indexed float32 loads:
// when either operand is a 32-bit-representable constant, it is folded into
// a plain MOVFload offset (both argument orders are matched).
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstore rewrites MOVFstore (float32 store)
// values: stores a GP-to-FP moved word (MOVWgpfp) directly from the integer
// register via MOVWstore, folds ADDVconst/MOVVaddr address arithmetic into
// the offset/symbol, and converts reg+reg / reg+shifted-reg addressing into
// the MOVFstoreidx indexed form.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVFstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstoreidx rewrites indexed float32 stores:
// when either address operand is a 32-bit-representable constant, it is
// folded into a plain MOVFstore offset (both argument orders are matched).
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUload rewrites MOVHUload (zero-extended
// halfword load) values: forwards a just-stored value via MOVHUreg instead
// of reloading it, folds ADDVconst/MOVVaddr address arithmetic into the
// offset/symbol, converts reg+reg / reg+shifted-reg addressing into the
// MOVHUloadidx indexed form, and constant-folds loads from read-only
// symbols at compile time via read16.
// NOTE: generated from _gen/LOONG64.rules; edit the rules, not this file.
func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
	// result: (MOVHUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVHUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHUloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUloadidx rewrites MOVHUloadidx values.
// When either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the immediate offset of a plain MOVHUload.
// Each for-loop below is one rewrite rule tried once; break means the rule
// did not match. Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUreg rewrites MOVHUreg (zero-extend the
// low 16 bits) values: it merges a preceding right-shift into a single
// BSTRPICKV bit-field extract, drops the extension when the argument is
// already zero-extended to 16 bits or fewer (unsigned narrow loads and regs),
// and constant-folds MOVVconst and fully-shifted-out inputs. Rules are tried
// in order; each for-loop is one rule, and break means no match. Reports
// whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg (SRLVconst [rc] x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + (15+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		// AuxInt packs lsb in the low 6 bits and msb in bits 6+.
		v.AuxInt = int64ToAuxInt(rc + (15+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SLLVconst [lc] x))
	// cond: lc >= 16
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(SRLconst [c] y))
	// cond: c >= 16
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 16) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHload rewrites MOVHload (sign-extending
// 16-bit load) values: it forwards a just-stored value from a matching
// MOVHstore, folds ADDVconst/MOVVaddr address arithmetic into the load's
// offset and symbol, converts ADDV/ADDshiftLLV addresses into indexed loads,
// and constant-folds loads from read-only symbol data. Rules are tried in
// order; each for-loop is one rule, and break means no match. Reports whether
// a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
	// result: (MOVHreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVHreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		// int16 conversion performs the sign extension at compile time.
		v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHloadidx rewrites MOVHloadidx values.
// When either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the immediate offset of a plain MOVHload.
// Each for-loop below is one rewrite rule tried once; break means the rule
// did not match. Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHreg rewrites MOVHreg (sign-extend the low
// 16 bits) values: it drops the extension when the argument is already
// correctly extended to 16 bits or fewer (narrow loads and reg extensions),
// and constant-folds MOVVconst and small non-negative ANDconst inputs. Rules
// are tried in order; each for-loop is one rule, and break means no match.
// Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstore rewrites MOVHstore (16-bit store)
// values: it folds ADDVconst/MOVVaddr address arithmetic into the store's
// offset and symbol, strips redundant MOV[HW][U]reg extensions of the stored
// value (the store only writes the low 16 bits), and converts
// ADDV/ADDshiftLLV addresses into indexed stores. Rules are tried in order;
// each for-loop is one rule, and break means no match. Reports whether a
// rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstoreidx rewrites MOVHstoreidx values.
// When either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the immediate offset of a plain MOVHstore.
// Each for-loop below is one rewrite rule tried once; break means the rule
// did not match. Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVHstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVload rewrites MOVVload (64-bit load)
// values: it forwards a just-stored value from a matching MOVDstore (via a
// MOVVfpgp float-to-int register move) or MOVVstore, folds
// ADDVconst/MOVVaddr address arithmetic into the load's offset and symbol,
// converts ADDV/ADDshiftLLV addresses into indexed loads, and constant-folds
// loads from read-only symbol data. Rules are tried in order; each for-loop
// is one rule, and break means no match. Reports whether a rewrite was
// applied.
func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVVload [off] {sym} ptr (MOVVstore [off] {sym} ptr x _))
	// result: (MOVVreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVloadidx rewrites MOVVloadidx values.
// When either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the immediate offset of a plain MOVVload.
// Each for-loop below is one rewrite rule tried once; break means the rule
// did not match. Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVVloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVVload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVVload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVnop rewrites MOVVnop values: a no-op move
// of a MOVVconst collapses to the constant itself. Reports whether a rewrite
// was applied.
func rewriteValueLOONG64_OpLOONG64MOVVnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVnop (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVreg rewrites MOVVreg values: a
// single-use argument is demoted to a MOVVnop, and a MOVVconst argument is
// collapsed to the constant itself. Rules are tried in order; break means no
// match. Reports whether a rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpLOONG64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstore rewrites MOVVstore (64-bit store)
// values: it turns a store of a MOVVfpgp (float-to-int register move) into a
// direct MOVDstore from the FP register, folds ADDVconst/MOVVaddr address
// arithmetic into the store's offset and symbol, and converts
// ADDV/ADDshiftLLV addresses into indexed stores. Rules are tried in order;
// each for-loop is one rule, and break means no match. Reports whether a
// rewrite was applied.
func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstoreidx applies the rewrite rules for
// MOVVstoreidx: when either address operand is a constant that fits in 32
// bits, fold it into the immediate offset of a plain MOVVstore.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVVstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVVstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUload applies the rewrite rules for
// MOVWUload (unsigned 32-bit load): forward a just-stored value, fold
// constant offsets and symbol addresses into the load, convert
// register+register addressing into the idx form, and constant-fold loads
// from read-only symbol data.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v_1.Pos, OpLOONG64MOVWfpgp, typ.Float32)
		v0.AddArg(val)
		v.AddArg(v0)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstore [off] {sym} ptr x _))
	// result: (MOVWUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWUloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUloadidx applies the rewrite rules for
// MOVWUloadidx: when either address operand is a constant that fits in 32
// bits, fold it into the immediate offset of a plain MOVWUload.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUreg applies the rewrite rules for
// MOVWUreg (zero-extend low 32 bits): combine with a preceding right shift
// into BSTRPICKV, drop the extension when the operand is already known to be
// zero-extended (unsigned narrow loads and reg-reg extensions), and
// constant-fold shifts, constants, and masks.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg (SRLVconst [rc] x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + (31+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + (31+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (SLLVconst [lc] x))
	// cond: lc >= 32
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(SRLconst [c] y))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWload applies the rewrite rules for
// MOVWload (sign-extending 32-bit load): forward a just-stored value, fold
// constant offsets and symbol addresses into the load, convert
// register+register addressing into the idx form, and constant-fold loads
// from read-only symbol data (sign-extended via int32).
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWload [off] {sym} ptr (MOVWstore [off] {sym} ptr x _))
	// result: (MOVWreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWloadidx applies the rewrite rules for
// MOVWloadidx: when either address operand is a constant that fits in 32
// bits, fold it into the immediate offset of a plain MOVWload.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWreg applies the rewrite rules for
// MOVWreg (sign-extend low 32 bits): drop the extension when the operand is
// already narrower than 32 bits or sign/zero-extended appropriately (narrow
// loads and reg-reg extensions), and constant-fold constants and masks.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstore applies the rewrite rules for
// MOVWstore (32-bit store): route FP-to-GP moves back to an FP store, fold
// constant offsets and symbol addresses into the store, drop redundant
// sign/zero extensions of the stored value (only the low 32 bits are
// written), and convert register+register addressing into the idx form.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
	// result: (MOVFstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstoreidx applies the rewrite rules for
// MOVWstoreidx: when either address operand is a constant that fits in 32
// bits, fold it into the immediate offset of a plain MOVWstore.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MULV applies the rewrite rules for MULV
// (64-bit multiply): use the narrower MULWVWU/MULWVW forms when both
// operands are single-use 32-bit extensions, simplify multiplication by 0
// and 1, apply multiply strength reduction for suitable constants, and
// constant-fold when both operands are constants. The inner commutative
// loops try each rule with the operands in both orders.
// Generated from _gen/LOONG64.rules — edit the rules file, not this function.
func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MULV r:(MOVWUreg x) s:(MOVWUreg y))
	// cond: r.Uses == 1 && s.Uses == 1
	// result: (MULWVWU x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			r := v_0
			if r.Op != OpLOONG64MOVWUreg {
				continue
			}
			x := r.Args[0]
			s := v_1
			if s.Op != OpLOONG64MOVWUreg {
				continue
			}
			y := s.Args[0]
			if !(r.Uses == 1 && s.Uses == 1) {
				continue
			}
			v.reset(OpLOONG64MULWVWU)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULV r:(MOVWreg x) s:(MOVWreg y))
	// cond: r.Uses == 1 && s.Uses == 1
	// result: (MULWVW x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			r := v_0
			if r.Op != OpLOONG64MOVWreg {
				continue
			}
			x := r.Args[0]
			s := v_1
			if s.Op != OpLOONG64MOVWreg {
				continue
			}
			y := s.Args[0]
			if !(r.Uses == 1 && s.Uses == 1) {
				continue
			}
			v.reset(OpLOONG64MULWVW)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULV _ (MOVVconst [0]))
	// result: (MOVVconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [c]))
	// cond: canMulStrengthReduce(config, c)
	// result: {mulStrengthReduce(v, x, c)}
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(canMulStrengthReduce(config, c)) {
				continue
			}
			v.copyOf(mulStrengthReduce(v, x, c))
			return true
		}
		break
	}
	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
	// result: (MOVVconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NEGV applies the generated rewrite rules for
// NEGV (64-bit negate) and reports whether v was rewritten. Rules are tried
// in order; the first match wins. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)
	for {
		if v_0.Op != OpLOONG64SUBV {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SUBV)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (SUBV x y)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] (SUBV <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64SUBV {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		// s.Uses == 1 ensures the ADDVconst is not shared with other users.
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEGV (NEGV x))
	// result: x
	for {
		if v_0.Op != OpLOONG64NEGV {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (NEGV x)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] x)
	for {
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64NEGV {
			break
		}
		x := s_0.Args[0]
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(x)
		return true
	}
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NOR applies the generated rewrite rules for
// NOR and reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		// NOR is commutative: the loop tries the operands in both orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NORconst applies the generated rewrite rules
// for NORconst (constant-fold NOR of two constants) and reports whether v was
// rewritten. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64OR applies the generated rewrite rules for OR
// and reports whether v was rewritten. The first four rules recognize
// shift/mask byte-swap idioms and lower them to REVB2H/REVB4H; the remainder
// fold constants and strength-reduce. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OR <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		// OR is commutative: each _iN loop tries the operands in both orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			// The nested AND is commutative too, hence the inner _i1 loop.
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (OR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (OR x (NORconst [0] y))
	// result: (ORN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ORN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORN applies the generated rewrite rules for
// ORN (or-not) and reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x (MOVVconst [-1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORconst applies the generated rewrite rules
// for ORconst (identity, absorption, constant folding, and merging nested
// ORconsts) and reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpLOONG64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMV applies the generated rewrite rule for
// REMV (signed 64-bit remainder): fold a remainder of two constants, guarding
// against division by zero. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		// d != 0 avoids folding an integer division by zero at compile time.
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMVU applies the generated rewrite rules for
// REMVU (unsigned 64-bit remainder): x%1 -> 0, power-of-two modulus -> mask,
// and constant folding. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMVU _ (MOVVconst [1]))
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (REMVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		// Unsigned x % 2^k == x & (2^k - 1).
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTR applies the generated rewrite rule for
// ROTR (32-bit rotate right): a constant rotate amount becomes ROTRconst with
// the amount reduced mod 32. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTR x (MOVVconst [c]))
	// result: (ROTRconst x [c&31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRconst)
		v.AuxInt = int64ToAuxInt(c & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTRV applies the generated rewrite rule for
// ROTRV (64-bit rotate right): a constant rotate amount becomes ROTRVconst
// with the amount reduced mod 64. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTRV x (MOVVconst [c]))
	// result: (ROTRVconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRVconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGT applies the generated rewrite rules for
// SGT (signed set-greater-than) and reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SGT (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// cond: is32Bit(d-c)
	// result: (SGT x (MOVVconst [d-c]))
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		if !(is32Bit(d - c)) {
			break
		}
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(d - c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGT x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		// x > x is always false.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTU applies the generated rewrite rules for
// SGTU (unsigned set-greater-than) and reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGTU x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		// x > x is always false.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTUconst applies the generated rewrite rules
// for SGTUconst (unsigned c > x): fold against constants and against operands
// whose value range is statically bounded (zero-extends, masks, shifts).
// Reports whether v was rewritten. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTconst applies the generated rewrite rules
// for SGTconst (signed c > x): fold against constants and against operands
// whose value range is statically bounded (sign/zero-extends, masks, shifts).
// Reports whether v was rewritten. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLL applies the generated rewrite rules for
// SLL (32-bit shift left): oversized constant shifts become 0, in-range
// constant shifts become SLLconst, and a redundant &31 mask on the shift
// amount is dropped. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SLLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SLLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLL x (ANDconst [31] y))
	// result: (SLL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLV applies the generated rewrite rules for
// SLLV (64-bit shift left): oversized constant shifts become 0, constant
// shifts become SLLVconst, and a redundant &63 mask on the shift amount is
// dropped. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLLV x (ANDconst [63] y))
	// result: (SLLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLVconst applies the generated rewrite rules
// for SLLVconst: (x+x)<<c folds to x<<(c+1) (or 0 when the shift would
// overflow the type width), and shifts of constants are folded.
// Reports whether v was rewritten. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst <t> [c] (ADDV x x))
	// cond: c < t.Size() * 8 - 1
	// result: (SLLVconst [c+1] x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		// x+x == x<<1, so the combined shift amount is c+1.
		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c + 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLVconst <t> [c] (ADDV x x))
	// cond: c >= t.Size() * 8 - 1
	// result: (MOVVconst [0])
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLconst applies the generated rewrite rules
// for SLLconst: (x+x)<<c folds to x<<(c+1), or to 0 when the shift would
// overflow the type width. Reports whether v was rewritten.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpLOONG64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst <t> [c] (ADDV x x))
	// cond: c < t.Size() * 8 - 1
	// result: (SLLconst [c+1] x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		// x+x == x<<1, so the combined shift amount is c+1.
		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64SLLconst)
		v.AuxInt = int64ToAuxInt(c + 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst <t> [c] (ADDV x x))
	// cond: c >= t.Size() * 8 - 1
	// result: (MOVVconst [0])
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRA applies generated rewrite rules for the
// LOONG64 SRA (32-bit arithmetic right shift) op. It reports whether v was
// rewritten: constant shift counts become SRAconst (saturated at 31), and a
// redundant (ANDconst [31] y) mask on the count is dropped.
func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (SRAconst x [31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		// Arithmetic shifts saturate: any count >= 31 yields the sign fill.
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRAconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (ANDconst [31] y))
	// result: (SRA x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAV applies generated rewrite rules for the
// LOONG64 SRAV (64-bit arithmetic right shift) op. It reports whether v was
// rewritten: constant counts become SRAVconst (saturated at 63), and a
// redundant (ANDconst [63] y) mask on the count is dropped.
func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		// Counts >= 64 saturate to 63 (pure sign fill).
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (ANDconst [63] y))
	// result: (SRAV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAVconst applies generated rewrite rules for
// the LOONG64 SRAVconst op (64-bit arithmetic right shift by a constant).
// It narrows shifts of sign-extended values to 32-bit SRAconst where legal,
// reduces over-wide shifts of narrow sign-extensions to a pure sign fill,
// and constant-folds shifts of MOVVconst. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAVconst [rc] (MOVWreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRAconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVBreg y))
	// cond: rc >= 8
	// result: (SRAVconst [63] (SLLVconst <t> [56] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 8) {
			break
		}
		// Shifting a sign-extended byte right by >= 8 leaves only the sign:
		// recompute it as (y<<56)>>63 without the MOVBreg.
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVHreg y))
	// cond: rc >= 16
	// result: (SRAVconst [63] (SLLVconst <t> [48] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 16) {
			break
		}
		// Same sign-only reduction for a sign-extended halfword.
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVWreg y))
	// cond: rc >= 32
	// result: (SRAconst [31] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 32) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRL applies generated rewrite rules for the
// LOONG64 SRL (32-bit logical right shift) op. Constant counts >= 32 yield
// zero, in-range constant counts become SRLconst, and a redundant
// (ANDconst [31] y) mask on the count is dropped. Reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		// Logical shift by the full width or more clears every bit.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRL x (ANDconst [31] y))
	// result: (SRL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLV applies generated rewrite rules for the
// LOONG64 SRLV (64-bit logical right shift) op. Constant counts >= 64 yield
// zero, other constant counts become SRLVconst, and a redundant
// (ANDconst [63] y) mask on the count is dropped. Reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRLV x (ANDconst [63] y))
	// result: (SRLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLVconst applies generated rewrite rules for
// the LOONG64 SRLVconst op (64-bit logical right shift by a constant). It
// fuses shift/zero-extend pairs into BSTRPICKV bit-field extracts, narrows
// shifts of zero-extended words to SRLconst, zeroes over-wide shifts of
// narrow zero-extensions, and constant-folds shifts of MOVVconst. Reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [rc] (SLLVconst [lc] x))
	// cond: lc <= rc
	// result: (BSTRPICKV [rc-lc + ((64-lc)-1)<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		// The BSTRPICKV AuxInt packs two fields: the low 6 bits hold the
		// starting (least-significant) bit and the <<6 term holds the most
		// significant bit of the extracted range — presumably matching the
		// instruction's lsb/msb operands; confirm against the .rules file.
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc - lc + ((64-lc)-1)<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + 31<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 31<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + 15<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 15<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + 7<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 7<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRLconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc >= 32
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		if !(rc >= 32) {
			break
		}
		// All 32 nonzero bits of the zero-extended word are shifted out.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc >= 16
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		if !(rc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc >= 8
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		if !(rc >= 8) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		// Fold via uint64 so the shift is logical, not arithmetic.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBD applies generated rewrite rules for the
// LOONG64 SUBD (float64 subtract) op, fusing a multiply feeding either
// operand into FMSUBD/FNMSUBD/FMADDD/FNMADDD when the function permits
// fused multiply-add (useFMA). Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBD x y z)
	for {
		if v_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD z (MULD x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGD {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULD {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		// z - (-(x*y)) == x*y + z.
		v.reset(OpLOONG64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD (NEGD (MULD x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDD x y z)
	for {
		if v_0.Op != OpLOONG64NEGD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBF applies generated rewrite rules for the
// LOONG64 SUBF (float32 subtract) op. Mirrors the SUBD rules: a multiply
// feeding either operand is fused into FMSUBF/FNMSUBF/FMADDF/FNMADDF when
// useFMA permits. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBF x y z)
	for {
		if v_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF z (MULF x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULF {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGF {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULF {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		// z - (-(x*y)) == x*y + z.
		v.reset(OpLOONG64FMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF (NEGF (MULF x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDF x y z)
	for {
		if v_0.Op != OpLOONG64NEGF {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBV applies generated rewrite rules for the
// LOONG64 SUBV (64-bit integer subtract) op: fold a constant subtrahend
// into SUBVconst, turn x-(-y) into x+y, x-x into 0, 0-x into NEGV, and
// fold c-(-(x-d)) into an ADDVconst. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		// Only constants that fit in 32 bits can go in the AuxInt form.
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64ADDV)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpLOONG64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (SUBV (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// result: (ADDVconst [c-d] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		// c - (-(x - d)) == x + (c - d).
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBVconst applies generated rewrite rules for
// the LOONG64 SUBVconst op: drop a zero subtraction, constant-fold against
// MOVVconst, and merge chained SUBVconst/ADDVconst into a single ADDVconst
// when the combined offset still fits in 32 bits. Reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		// (x - d) - c == x + (-c - d).
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		// (x + d) - c == x + (d - c).
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XOR applies generated rewrite rules for the
// LOONG64 XOR op. It recognizes byte-swap idioms built from shift/mask
// pairs and rewrites them to REVB2H/REVB4H, folds a 32-bit constant operand
// into XORconst, and reduces x^x to 0. The inner _i0/_i1/_i2 loops try both
// operand orders because XOR (and AND) are commutative. Reports whether v
// was rewritten.
func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XOR <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			// (x>>8) ^ (x<<8) on a 16-bit value swaps the two bytes.
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			// Complementary alternating byte masks: swap bytes within
			// each 16-bit half of the 32-bit value.
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					// 64-bit version of the byte-swap idiom: swap bytes
					// within each of the four 16-bit halves.
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (XOR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				// Masks only cover the low 32 bits, so truncate x to 32
				// bits before the 4-halfword byte swap.
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XORconst applies generated rewrite rules for
// the LOONG64 XORconst op: drop x^0, turn x^-1 into a NORconst-based NOT,
// constant-fold against MOVVconst, and merge chained XORconst ops when the
// combined constant fits in 32 bits. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		// x ^ -1 == ^x == NOR(0, x).
		v.reset(OpLOONG64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLeq16 lowers the generic Leq16 op. LOONG64 has no
// less-or-equal instruction, so x<=y is computed as !(x>y): SGT on the
// sign-extended operands, with the boolean inverted by XOR with 1.
// Always rewrites and returns true.
func rewriteValueLOONG64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq16U lowers the generic Leq16U op: unsigned
// x<=y is computed as !(x>y) via SGTU on the zero-extended operands,
// inverted by XOR with 1. Always rewrites and returns true.
func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32 lowers the generic Leq32 op: signed x<=y is
// computed as !(x>y) via SGT on the sign-extended operands, inverted by
// XOR with 1. Always rewrites and returns true.
func rewriteValueLOONG64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32F lowers the generic Leq32F op: float32 x<=y
// becomes CMPGEF with the operands swapped (y>=x) read through FPFlagTrue.
// Always rewrites and returns true.
func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
		// Operands swapped: x <= y is evaluated as y >= x.
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32U lowers the generic Leq32U op: unsigned
// x<=y is computed as !(x>y) via SGTU on the zero-extended operands,
// inverted by XOR with 1. Always rewrites and returns true.
func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64 lowers Leq64 to (XOR (MOVVconst [1]) (SGT x y));
// full-width operands need no extension. Always applies.
func rewriteValueLOONG64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64F lowers Leq64F to (FPFlagTrue (CMPGED y x));
// the operands are swapped because x <= y holds iff y >= x. Always applies.
func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64U lowers the unsigned Leq64U to
// (XOR (MOVVconst [1]) (SGTU x y)); full-width operands need no extension.
// Always applies.
func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8 lowers Leq8 to
// (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y))),
// i.e. x <= y computed as !(x > y) on sign-extended operands. Always applies.
func rewriteValueLOONG64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8U lowers the unsigned Leq8U to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))),
// i.e. x <= y computed as !(x > y) on zero-extended operands. Always applies.
func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16 lowers Less16 to
// (SGT (SignExt16to64 y) (SignExt16to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16U lowers the unsigned Less16U to
// (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32 lowers Less32 to
// (SGT (SignExt32to64 y) (SignExt32to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32F lowers Less32F to (FPFlagTrue (CMPGTF y x));
// the operands are swapped because x < y holds iff y > x. Always applies.
func rewriteValueLOONG64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess32U lowers the unsigned Less32U to
// (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess64 lowers Less64 to (SGT y x): x < y expressed
// as y > x; full-width operands need no extension. Always applies.
func rewriteValueLOONG64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess64F lowers Less64F to (FPFlagTrue (CMPGTD y x));
// the operands are swapped because x < y holds iff y > x. Always applies.
func rewriteValueLOONG64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess64U lowers the unsigned Less64U to (SGTU y x):
// x < y expressed as y > x; full-width operands need no extension. Always applies.
func rewriteValueLOONG64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess8 lowers Less8 to
// (SGT (SignExt8to64 y) (SignExt8to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess8U lowers the unsigned Less8U to
// (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)): x < y expressed as y > x.
// Always applies.
func rewriteValueLOONG64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLoad selects a LOONG64 load instruction for a generic
// Load based on the loaded type's width, signedness, and kind:
// bool/uint8 -> MOVBUload, int8 -> MOVBload, int16/uint16 -> MOVH(U)load,
// int32/uint32 -> MOVW(U)load, 64-bit int or pointer -> MOVVload,
// float32 -> MOVFload, float64 -> MOVDload.
// Returns false when the type matches none of the cases.
func rewriteValueLOONG64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLocalAddr lowers LocalAddr to MOVVaddr. When the
// addressed element contains pointers, the address is anchored to the memory
// state via SPanchored (so the spill slot stays live for the GC); otherwise
// the memory argument is dropped. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x16 lowers Lsh16x16: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64 (Go shifts of >= width yield 0).
// Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x32 lowers Lsh16x32: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x64 lowers Lsh16x64: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the 64-bit shift amount is < 64 (no extension needed). Returns false
// if neither rule applies.
func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x8 lowers Lsh16x8: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x16 lowers Lsh32x16: a bare 32-bit SLL when the
// shift is statically bounded; otherwise the SLL result is kept via MASKEQZ
// only while the zero-extended shift amount is < 32. Returns false if neither
// rule applies.
func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x32 lowers Lsh32x32: a bare 32-bit SLL when the
// shift is statically bounded; otherwise the SLL result is kept via MASKEQZ
// only while the zero-extended shift amount is < 32. Returns false if neither
// rule applies.
func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x64 lowers Lsh32x64: a bare 32-bit SLL when the
// shift is statically bounded; otherwise the SLL result is kept via MASKEQZ
// only while the 64-bit shift amount is < 32 (no extension needed). Returns
// false if neither rule applies.
func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x8 lowers Lsh32x8: a bare 32-bit SLL when the
// shift is statically bounded; otherwise the SLL result is kept via MASKEQZ
// only while the zero-extended shift amount is < 32. Returns false if neither
// rule applies.
func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x16 lowers Lsh64x16: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x32 lowers Lsh64x32: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x64 lowers Lsh64x64: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the 64-bit shift amount is < 64 (no extension needed). Returns false
// if neither rule applies.
func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x8 lowers Lsh64x8: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x16 lowers Lsh8x16: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x32 lowers Lsh8x32: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the zero-extended shift amount is < 64. Returns false if neither rule applies.
func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x64 lowers Lsh8x64: a bare SLLV when the shift
// is statically bounded; otherwise the SLLV result is kept via MASKEQZ only
// while the 64-bit shift amount is < 64 (no extension needed). Returns false
// if neither rule applies.
func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x8 lowers the generic Lsh8x8 op to LOONG64
// machine ops, zero-extending the 8-bit count to 64 bits for the unbounded
// case. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpMod16 lowers signed 16-bit remainder to REMV on
// sign-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod16u lowers unsigned 16-bit remainder to REMVU on
// zero-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32 lowers signed 32-bit remainder to REMV on
// sign-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32u lowers unsigned 32-bit remainder to REMVU on
// zero-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod64 lowers 64-bit signed remainder directly to
// REMV; no extension is needed at full register width.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (REMV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueLOONG64_OpMod8 lowers signed 8-bit remainder to REMV on
// sign-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod8u lowers unsigned 8-bit remainder to REMVU on
// zero-extended operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMove lowers the generic Move op (memmove of a
// compile-time-constant size, held in AuxInt) to LOONG64 loads/stores.
// Sizes 0-16 get unrolled byte/half/word/doubleword load-store pairs; odd
// sizes use one overlapping wider access (e.g. size 7 issues two 4-byte
// accesses at offsets 0 and 3). Sizes in (16,192) become LoweredMove and
// >= 192 become LoweredMoveLoop. Generated from _gen/LOONG64.rules; do not
// hand-edit.
func rewriteValueLOONG64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [5] dst src mem)
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] dst src mem)
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [9] dst src mem)
	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [10] dst src mem)
	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [11] dst src mem)
	// result: (MOVWstore [7] dst (MOVWload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [13] dst src mem)
	// result: (MOVVstore [5] dst (MOVVload [5] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(5)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [14] dst src mem)
	// result: (MOVVstore [6] dst (MOVVload [6] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [15] dst src mem)
	// result: (MOVVstore [7] dst (MOVVload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] dst src mem)
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 16 && s < 192 && logLargeCopy(v, s)
	// result: (LoweredMove [s] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 16 && s < 192 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64LoweredMove)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s >= 192 && logLargeCopy(v, s)
	// result: (LoweredMoveLoop [s] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s >= 192 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64LoweredMoveLoop)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpNeq16 lowers 16-bit inequality to SGTU of the XOR of
// the extended operands against zero (nonzero XOR means unequal).
// NOTE(review): the rule extends x with ZeroExt16to32 but y with
// ZeroExt16to64 — this asymmetry comes straight from _gen/LOONG64.rules;
// confirm against the rules file before treating it as intentional.
// Generated code; do not hand-edit.
func rewriteValueLOONG64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32 lowers 32-bit inequality to SGTU of the XOR of
// the zero-extended operands against zero.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32F lowers 32-bit float inequality by inverting
// the CMPEQF equality flag via FPFlagFalse.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64 lowers 64-bit inequality to SGTU of (XOR x y)
// against zero; no extension needed at full width.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64F lowers 64-bit float inequality by inverting
// the CMPEQD equality flag via FPFlagFalse.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq8 lowers 8-bit inequality to SGTU of the XOR of
// the zero-extended operands against zero.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeqPtr lowers pointer inequality exactly like Neq64:
// SGTU of (XOR x y) against zero.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNot lowers boolean Not to XORconst [1], flipping the
// low bit of the 0/1 boolean value.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueLOONG64_OpOffPtr lowers OffPtr: offsets from SP fold into a
// MOVVaddr (offset narrowed to int32 AuxInt); any other base becomes an
// ADDVconst. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount16 lowers PopCount16 by zero-extending to
// 32 bits, moving the value into an FP register (MOVWgpfp), running the
// vector population count (VPCNT16), and moving the result back (MOVWfpgp).
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 <t> x)
	// result: (MOVWfpgp <t> (VPCNT16 <typ.Float32> (MOVWgpfp <typ.Float32> (ZeroExt16to32 x))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT16, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount32 lowers PopCount32 via the FP-register
// round trip MOVWgpfp -> VPCNT32 -> MOVWfpgp.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpPopCount32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount32 <t> x)
	// result: (MOVWfpgp <t> (VPCNT32 <typ.Float32> (MOVWgpfp <typ.Float32> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT32, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount64 lowers PopCount64 via the FP-register
// round trip MOVVgpfp -> VPCNT64 -> MOVVfpgp.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpPopCount64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount64 <t> x)
	// result: (MOVVfpgp <t> (VPCNT64 <typ.Float64> (MOVVgpfp <typ.Float64> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVVfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT64, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVgpfp, typ.Float64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCache lowers PrefetchCache to a PRELD with
// hint 0 in AuxInt. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpPrefetchCache(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCache addr mem)
	// result: (PRELD addr mem [0])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELD)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCacheStreamed lowers PrefetchCacheStreamed
// to PRELDX with a packed AuxInt hint taken verbatim from the rules file;
// see _gen/LOONG64.rules for the meaning of the encoded fields.
// Generated code; do not hand-edit.
func rewriteValueLOONG64_OpPrefetchCacheStreamed(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCacheStreamed addr mem)
	// result: (PRELDX addr mem [(((512 << 1) + (1 << 12)) << 5) + 2])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELDX)
		v.AuxInt = int64ToAuxInt((((512 << 1) + (1 << 12)) << 5) + 2)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft16 lowers RotateLeft16. A constant count
// splits into OR of a left shift by c&15 and a right shift by -c&15; a
// variable count duplicates the 16-bit value into the upper half of a
// 32-bit word and uses ROTR with the negated count.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft16 <t> x y)
	// result: (ROTR <t> (OR <typ.UInt32> (ZeroExt16to32 x) (SLLVconst <t> (ZeroExt16to32 x) [16])) (NEGV <typ.Int64> y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v2.AuxInt = int64ToAuxInt(16)
		v2.AddArg(v1)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v3.AddArg(y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft32 lowers RotateLeft32 to the native
// rotate-right ROTR with a negated count.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (ROTR x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft64 lowers RotateLeft64 to the native
// 64-bit rotate-right ROTRV with a negated count.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROTRV x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTRV)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft8 lowers RotateLeft8. A constant count
// splits into OR of shifts by c&7 and -c&7; a variable count ORs a left
// shift by y&7 with a right shift of the zero-extended value by (-y)&7.
// Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft8 <t> x y)
	// result: (OR <t> (SLLV <t> x (ANDconst <typ.Int64> [7] y)) (SRLV <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEGV <typ.Int64> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64OR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux16 lowers the unsigned 16-bit right shift by
// a 16-bit count: SRLV on the zero-extended value when the count is
// statically bounded, otherwise SRLV guarded by MASKEQZ/SGTU so counts
// >= 64 produce 0. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux32 lowers the generic Rsh16Ux32 op (unsigned
// right shift of a 16-bit value by a 32-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// The SGTU(64, count) guard feeds MASKEQZ so that an oversized count
	// produces 0, matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the zero-extended count) is deliberately shared by both uses.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux64 lowers the generic Rsh16Ux64 op (unsigned
// right shift of a 16-bit value by a 64-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// The count is already 64-bit, so no extension is needed; SGTU(64, y)
	// guards MASKEQZ so an oversized count yields 0 (Go shift semantics).
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux8 lowers the generic Rsh16Ux8 op (unsigned
// right shift of a 16-bit value by an 8-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// The SGTU(64, count) guard feeds MASKEQZ so that an oversized count
	// produces 0, matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		// v2 (the zero-extended count) is deliberately shared by both uses.
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x16 lowers the generic Rsh16x16 op (arithmetic
// right shift of a signed 16-bit value by a 16-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended count) is deliberately shared by both uses.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x32 lowers the generic Rsh16x32 op (arithmetic
// right shift of a signed 16-bit value by a 32-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended count) is deliberately shared by both uses.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x64 lowers the generic Rsh16x64 op (arithmetic
// right shift of a signed 16-bit value by a 64-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	// The count is already 64-bit, so no extension is needed. Oversized counts
	// are clamped: NEGV(SGTU(y, 63)) is all-ones when y > 63, so the OR forces
	// an effective count >= 63 and SRAV fills with the sign bit.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x8 lowers the generic Rsh16x8 op (arithmetic
// right shift of a signed 16-bit value by an 8-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		// v4 (the zero-extended count) is deliberately shared by both uses.
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux16 lowers the generic Rsh32Ux16 op (unsigned
// right shift of a 32-bit value by a 16-bit count) to LOONG64 machine ops,
// using the 32-bit SRL instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	// The SGTU(32, count) guard feeds MASKEQZ so that counts >= 32 produce 0,
	// matching Go's shift semantics for a 32-bit operand.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux32 lowers the generic Rsh32Ux32 op (unsigned
// right shift of a 32-bit value by a 32-bit count) to LOONG64 machine ops,
// using the 32-bit SRL instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	// The SGTU(32, count) guard feeds MASKEQZ so that counts >= 32 produce 0,
	// matching Go's shift semantics for a 32-bit operand.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux64 lowers the generic Rsh32Ux64 op (unsigned
// right shift of a 32-bit value by a 64-bit count) to LOONG64 machine ops,
// using the 32-bit SRL instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	// The count is already 64-bit, so no extension is needed; SGTU(32, y)
	// guards MASKEQZ so counts >= 32 yield 0 (Go shift semantics).
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux8 lowers the generic Rsh32Ux8 op (unsigned
// right shift of a 32-bit value by an 8-bit count) to LOONG64 machine ops,
// using the 32-bit SRL instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	// The SGTU(32, count) guard feeds MASKEQZ so that counts >= 32 produce 0,
	// matching Go's shift semantics for a 32-bit operand.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x16 lowers the generic Rsh32x16 op (arithmetic
// right shift of a signed 32-bit value by a 16-bit count) to LOONG64 ops,
// using the 32-bit SRA instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 31))
	// is all-ones when count > 31, so the OR forces an effective count >= 31
	// and SRA fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x32 lowers the generic Rsh32x32 op (arithmetic
// right shift of a signed 32-bit value by a 32-bit count) to LOONG64 ops,
// using the 32-bit SRA instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 31))
	// is all-ones when count > 31, so the OR forces an effective count >= 31
	// and SRA fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x64 lowers the generic Rsh32x64 op (arithmetic
// right shift of a signed 32-bit value by a 64-bit count) to LOONG64 ops,
// using the 32-bit SRA instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
	// The count is already 64-bit, so no extension is needed. Oversized counts
	// are clamped: NEGV(SGTU(y, 31)) is all-ones when y > 31, so the OR forces
	// an effective count >= 31 and SRA fills with the sign bit.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x8 lowers the generic Rsh32x8 op (arithmetic
// right shift of a signed 32-bit value by an 8-bit count) to LOONG64 ops,
// using the 32-bit SRA instruction directly.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 31))
	// is all-ones when count > 31, so the OR forces an effective count >= 31
	// and SRA fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux16 lowers the generic Rsh64Ux16 op (unsigned
// right shift of a 64-bit value by a 16-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	// The SGTU(64, count) guard feeds MASKEQZ so that an oversized count
	// produces 0, matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux32 lowers the generic Rsh64Ux32 op (unsigned
// right shift of a 64-bit value by a 32-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// The SGTU(64, count) guard feeds MASKEQZ so that an oversized count
	// produces 0, matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux64 lowers the generic Rsh64Ux64 op (unsigned
// right shift of a 64-bit value by a 64-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// The count is already 64-bit, so no extension is needed; SGTU(64, y)
	// guards MASKEQZ so an oversized count yields 0 (Go shift semantics).
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux8 lowers the generic Rsh64Ux8 op (unsigned
// right shift of a 64-bit value by an 8-bit count) to LOONG64 machine ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// The SGTU(64, count) guard feeds MASKEQZ so that an oversized count
	// produces 0, matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		// v1 (the zero-extended count) is deliberately shared by both uses.
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x16 lowers the generic Rsh64x16 op (arithmetic
// right shift of a signed 64-bit value by a 16-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x32 lowers the generic Rsh64x32 op (arithmetic
// right shift of a signed 64-bit value by a 32-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x64 lowers the generic Rsh64x64 op (arithmetic
// right shift of a signed 64-bit value by a 64-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	// The count is already 64-bit, so no extension is needed. Oversized counts
	// are clamped: NEGV(SGTU(y, 63)) is all-ones when y > 63, so the OR forces
	// an effective count >= 63 and SRAV fills with the sign bit.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x8 lowers the generic Rsh64x8 op (arithmetic
// right shift of a signed 64-bit value by an 8-bit count) to LOONG64 ops.
// Generated from _gen/LOONG64.rules; change the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	// Oversized counts are clamped rather than masked: NEGV(SGTU(count, 63))
	// is all-ones when count > 63, so the OR forces an effective count >= 63
	// and SRAV fills with the sign bit, matching Go's signed-shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		// v3 (the zero-extended count) is deliberately shared by both uses.
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux16 lowers the unsigned 8-bit right shift
// with a 16-bit amount. x is zero-extended so the logical shift sees no
// stray high bits. In the unbounded case MASKEQZ zeroes the whole result
// when SGTU(64, y) is false, i.e. when the shift amount is >= 64,
// matching Go's semantics for oversized unsigned shifts.
func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux32 lowers the unsigned 8-bit right shift
// with a 32-bit amount; identical in shape to Rsh8Ux16 except the shift
// amount is zero-extended from 32 bits. MASKEQZ zeroes the result when
// the amount is >= 64.
func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux64 lowers the unsigned 8-bit right shift
// with a 64-bit amount. The amount needs no extension; MASKEQZ zeroes
// the result when y >= 64.
func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux8 lowers the unsigned 8-bit right shift
// with an 8-bit amount; same scheme as the other Rsh8Ux* rules, with
// both operands zero-extended from 8 bits and MASKEQZ zeroing the
// result for amounts >= 64.
func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x16 lowers the signed 8-bit right shift with
// a 16-bit amount. x is sign-extended so SRAV fills with the correct
// sign bit; the unbounded case saturates the (zero-extended) amount to
// all ones when it exceeds 63, as in the Rsh64x* rules.
func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x32 lowers the signed 8-bit right shift with
// a 32-bit amount; same shape as Rsh8x16 but the amount is zero-extended
// from 32 bits.
func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x64 lowers the signed 8-bit right shift with
// a 64-bit amount; the amount needs no extension, otherwise identical to
// the other Rsh8x* rules.
func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x8 lowers the signed 8-bit right shift with
// an 8-bit amount: x is sign-extended, y zero-extended, and the amount
// saturated to all ones when greater than 63 so SRAV sign-fills.
func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect0 lowers Select0 (the first result) of the
// multi-result pseudo-ops: the high word of a full 64x64 multiply, the
// low word of an overflow-checked multiply, and the raw sum/difference
// of Add64carry/Sub64borrow (the carry/borrow flag itself is handled by
// the Select1 rules).
func rewriteValueLOONG64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Select0 (Mul64uhilo x y))
	// result: (MULHVU x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULHVU)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64SUBV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect1 lowers Select1 (the second result) of
// the multi-result pseudo-ops: the low word of Mul64uhilo, the overflow
// flag of Mul64uover (high word != 0), and the carry/borrow-out of
// Add64carry/Sub64borrow computed via unsigned compares. The value named
// s is the shared intermediate sum/difference that the corresponding
// Select0 rule also produces.
func rewriteValueLOONG64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpLOONG64MULHVU, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		// At most one of the two additions can wrap (c is 0 or 1), so
		// OR-ing the two unsigned-overflow tests yields the 0/1 carry.
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		// Mirror of the Add64carry case: an underflow in either
		// subtraction (but never both) sets the borrow-out.
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelectN replaces a static call to
// runtime.memmove with an inline Move op when the size is a small
// non-negative constant, the call's result is used exactly once, and the
// move is inlinable for this configuration. clobber(call) marks the call
// dead so deadcode can remove it.
func rewriteValueLOONG64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVVconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpLOONG64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpLOONG64MOVVconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSlicemask lowers Slicemask as (-x) >> 63
// (arithmetic): for x > 0 the negation is negative, so the sign-fill
// shift yields all ones; for x == 0 it yields zero. This rule always
// fires, hence the function has no failing return path.
func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpStore lowers a generic Store to the
// width-specific LOONG64 store, dispatching on the stored type's size
// (1/2/4/8 bytes) and, for 4- and 8-byte stores, on whether the type is
// floating point (MOVF/MOVD) or integer (MOVW/MOVV).
func rewriteValueLOONG64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpZero lowers the generic Zero op. Sizes 0-16 get
// explicit zero-constant stores (some pairs overlap, e.g. size 7 uses
// two 4-byte stores at offsets 0 and 3, which is fine since both write
// zeros); sizes 17-191 use the unrolled LoweredZero and sizes >= 192 the
// LoweredZeroLoop pseudo-op.
func rewriteValueLOONG64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVHstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [5] ptr mem)
	// result: (MOVBstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// result: (MOVWstore [3] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [9] ptr mem)
	// result: (MOVBstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [10] ptr mem)
	// result: (MOVHstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [11] ptr mem)
	// result: (MOVWstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [13] ptr mem)
	// result: (MOVVstore [5] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [14] ptr mem)
	// result: (MOVVstore [6] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [15] ptr mem)
	// result: (MOVVstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] ptr mem)
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s > 16 && s < 192
	// result: (LoweredZero [s] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s > 16 && s < 192) {
			break
		}
		v.reset(OpLOONG64LoweredZero)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s >= 192
	// result: (LoweredZeroLoop [s] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s >= 192) {
			break
		}
		v.reset(OpLOONG64LoweredZeroLoop)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteBlockLOONG64 rewrites the control value of block b according to the
// generated LOONG64 rules (_gen/LOONG64.rules). Each "// match:" comment gives
// the block-and-control pattern being recognized and each "// result:" comment
// the replacement; the rules are tried in source order and the function returns
// true as soon as one fires, false if none apply. Typical rewrites fold a
// zero-constant comparison operand into a zero-test block kind (EQZ/NEZ/
// LEZ/GEZ/LTZ/GTZ), invert XORconst[1]-negated comparisons, turn comparison
// results used as booleans back into two-operand branches (BEQ/BNE/BLT/...),
// and resolve constant conditions into BlockFirst (statically-taken branches).
func rewriteBlockLOONG64(b *Block) bool {
	// Shared type cache, used when a rule must materialize a new Value.
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockLOONG64BEQ:
		// match: (BEQ (MOVVconst [0]) cond yes no)
		// result: (EQZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
		// match: (BEQ cond (MOVVconst [0]) yes no)
		// result: (EQZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
	case BlockLOONG64BGE:
		// match: (BGE (MOVVconst [0]) cond yes no)
		// result: (LEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64LEZ, cond)
			return true
		}
		// match: (BGE cond (MOVVconst [0]) yes no)
		// result: (GEZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64GEZ, cond)
			return true
		}
	case BlockLOONG64BGEU:
		// match: (BGEU (MOVVconst [0]) cond yes no)
		// result: (EQZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
	case BlockLOONG64BLT:
		// match: (BLT (MOVVconst [0]) cond yes no)
		// result: (GTZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64GTZ, cond)
			return true
		}
		// match: (BLT cond (MOVVconst [0]) yes no)
		// result: (LTZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64LTZ, cond)
			return true
		}
	case BlockLOONG64BLTU:
		// match: (BLTU (MOVVconst [0]) cond yes no)
		// result: (NEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
	case BlockLOONG64BNE:
		// match: (BNE (MOVVconst [0]) cond yes no)
		// result: (NEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
		// match: (BNE cond (MOVVconst [0]) yes no)
		// result: (NEZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
	case BlockLOONG64EQZ:
		// match: (EQZ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (EQZ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NEZ cmp yes no)
		// XORconst [1] of a 0/1 comparison result is boolean negation,
		// so EQZ of the negation becomes NEZ of the comparison itself.
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (SGTUconst [1] x) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
		// match: (EQZ (SGTU x (MOVVconst [0])) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
		// match: (EQZ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64GEZ, x)
			return true
		}
		// match: (EQZ (SGT x (MOVVconst [0])) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64LEZ, x)
			return true
		}
		// match: (EQZ (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (EQZ (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64EQZ, v0)
			return true
		}
		// match: (EQZ (SUBV x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BEQ, x, y)
			return true
		}
		// match: (EQZ (SGT x y) yes no)
		// result: (BGE y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGE, y, x)
			return true
		}
		// match: (EQZ (SGTU x y) yes no)
		// result: (BGEU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGEU, y, x)
			return true
		}
		// match: (EQZ (SGTconst [c] y) yes no)
		// result: (BGE y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BGE, y, v0)
			return true
		}
		// match: (EQZ (SGTUconst [c] y) yes no)
		// result: (BGEU y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BGEU, y, v0)
			return true
		}
		// match: (EQZ (MOVVconst [0]) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQZ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (EQZ (NEGV x) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64NEGV {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
	case BlockLOONG64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NEZ (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockLOONG64NEZ, v0)
			return true
		}
	case BlockJumpTable:
		// match: (JumpTable idx)
		// result: (JUMPTABLE {makeJumpTableSym(b)} idx (MOVVaddr <typ.Uintptr> {makeJumpTableSym(b)} (SB)))
		for {
			idx := b.Controls[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVaddr, typ.Uintptr)
			v0.Aux = symToAux(makeJumpTableSym(b))
			v1 := b.NewValue0(b.Pos, OpSB, typ.Uintptr)
			v0.AddArg(v1)
			b.resetWithControl2(BlockLOONG64JUMPTABLE, idx, v0)
			b.Aux = symToAux(makeJumpTableSym(b))
			return true
		}
	case BlockLOONG64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64NEZ:
		// match: (NEZ (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (NEZ (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQZ cmp yes no)
		// Mirror of the EQZ rules above: NEZ of a negated comparison
		// becomes EQZ of the comparison itself.
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (SGTUconst [1] x) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
		// match: (NEZ (SGTU x (MOVVconst [0])) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
		// match: (NEZ (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64LTZ, x)
			return true
		}
		// match: (NEZ (SGT x (MOVVconst [0])) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64GTZ, x)
			return true
		}
		// match: (NEZ (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (NEZ (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64NEZ, v0)
			return true
		}
		// match: (NEZ (SUBV x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BNE, x, y)
			return true
		}
		// match: (NEZ (SGT x y) yes no)
		// result: (BLT y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLT, y, x)
			return true
		}
		// match: (NEZ (SGTU x y) yes no)
		// result: (BLTU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLTU, y, x)
			return true
		}
		// match: (NEZ (SGTconst [c] y) yes no)
		// result: (BLT y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BLT, y, v0)
			return true
		}
		// match: (NEZ (SGTUconst [c] y) yes no)
		// result: (BLTU y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BLTU, y, v0)
			return true
		}
		// match: (NEZ (MOVVconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NEZ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (NEZ (NEGV x) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64NEGV {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
	}
	// No rule matched; leave the block unchanged.
	return false
}
 12890  

View as plain text