Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
// rewriteValueLOONG64 applies the LOONG64 lowering rules (generated from
// _gen/LOONG64.rules) to v. Lowerings that are a pure 1:1 opcode rename are
// handled inline by overwriting v.Op and keeping the arguments as-is; rules
// that must inspect arguments, aux fields, or build new values dispatch to a
// per-opcode helper function. It reports whether v was rewritten.
//
// NOTE(review): this file is machine-generated ("DO NOT EDIT"); behavioral
// changes belong in _gen/LOONG64.rules followed by 'go generate'.
func rewriteValueLOONG64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpLOONG64ABSD
		return true
	case OpAdd16:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd32:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd32F:
		v.Op = OpLOONG64ADDF
		return true
	case OpAdd64:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd64F:
		v.Op = OpLOONG64ADDD
		return true
	case OpAdd8:
		v.Op = OpLOONG64ADDV
		return true
	case OpAddPtr:
		v.Op = OpLOONG64ADDV
		return true
	case OpAddr:
		return rewriteValueLOONG64_OpAddr(v)
	case OpAnd16:
		v.Op = OpLOONG64AND
		return true
	case OpAnd32:
		v.Op = OpLOONG64AND
		return true
	case OpAnd64:
		v.Op = OpLOONG64AND
		return true
	case OpAnd8:
		v.Op = OpLOONG64AND
		return true
	case OpAndB:
		v.Op = OpLOONG64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpLOONG64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpLOONG64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpLOONG64LoweredAtomicAnd32
		return true
	case OpAtomicAnd32value:
		v.Op = OpLOONG64LoweredAtomicAnd32value
		return true
	case OpAtomicAnd64value:
		v.Op = OpLOONG64LoweredAtomicAnd64value
		return true
	case OpAtomicAnd8:
		return rewriteValueLOONG64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap32Variant:
		return rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpLOONG64LoweredAtomicCas64
		return true
	case OpAtomicCompareAndSwap64Variant:
		v.Op = OpLOONG64LoweredAtomicCas64Variant
		return true
	case OpAtomicExchange32:
		v.Op = OpLOONG64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpLOONG64LoweredAtomicExchange64
		return true
	case OpAtomicExchange8Variant:
		v.Op = OpLOONG64LoweredAtomicExchange8Variant
		return true
	case OpAtomicLoad32:
		v.Op = OpLOONG64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpLOONG64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpLOONG64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		v.Op = OpLOONG64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpLOONG64LoweredAtomicOr32
		return true
	case OpAtomicOr32value:
		v.Op = OpLOONG64LoweredAtomicOr32value
		return true
	case OpAtomicOr64value:
		v.Op = OpLOONG64LoweredAtomicOr64value
		return true
	case OpAtomicOr8:
		return rewriteValueLOONG64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpLOONG64LoweredAtomicStore32
		return true
	case OpAtomicStore32Variant:
		v.Op = OpLOONG64LoweredAtomicStore32Variant
		return true
	case OpAtomicStore64:
		v.Op = OpLOONG64LoweredAtomicStore64
		return true
	case OpAtomicStore64Variant:
		v.Op = OpLOONG64LoweredAtomicStore64Variant
		return true
	case OpAtomicStore8:
		v.Op = OpLOONG64LoweredAtomicStore8
		return true
	case OpAtomicStore8Variant:
		v.Op = OpLOONG64LoweredAtomicStore8Variant
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpLOONG64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueLOONG64_OpAvg64u(v)
	case OpBitLen16:
		return rewriteValueLOONG64_OpBitLen16(v)
	case OpBitLen32:
		return rewriteValueLOONG64_OpBitLen32(v)
	case OpBitLen64:
		return rewriteValueLOONG64_OpBitLen64(v)
	case OpBitLen8:
		return rewriteValueLOONG64_OpBitLen8(v)
	case OpBitRev16:
		return rewriteValueLOONG64_OpBitRev16(v)
	case OpBitRev32:
		v.Op = OpLOONG64BITREVW
		return true
	case OpBitRev64:
		v.Op = OpLOONG64BITREVV
		return true
	case OpBitRev8:
		v.Op = OpLOONG64BITREV4B
		return true
	case OpBswap16:
		v.Op = OpLOONG64REVB2H
		return true
	case OpBswap32:
		v.Op = OpLOONG64REVB2W
		return true
	case OpBswap64:
		v.Op = OpLOONG64REVBV
		return true
	case OpClosureCall:
		v.Op = OpLOONG64CALLclosure
		return true
	case OpCom16:
		return rewriteValueLOONG64_OpCom16(v)
	case OpCom32:
		return rewriteValueLOONG64_OpCom32(v)
	case OpCom64:
		return rewriteValueLOONG64_OpCom64(v)
	case OpCom8:
		return rewriteValueLOONG64_OpCom8(v)
	case OpCondSelect:
		return rewriteValueLOONG64_OpCondSelect(v)
	case OpConst16:
		return rewriteValueLOONG64_OpConst16(v)
	case OpConst32:
		return rewriteValueLOONG64_OpConst32(v)
	case OpConst32F:
		return rewriteValueLOONG64_OpConst32F(v)
	case OpConst64:
		return rewriteValueLOONG64_OpConst64(v)
	case OpConst64F:
		return rewriteValueLOONG64_OpConst64F(v)
	case OpConst8:
		return rewriteValueLOONG64_OpConst8(v)
	case OpConstBool:
		return rewriteValueLOONG64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueLOONG64_OpConstNil(v)
	case OpCopysign:
		v.Op = OpLOONG64FCOPYSGD
		return true
	case OpCtz16:
		return rewriteValueLOONG64_OpCtz16(v)
	case OpCtz16NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz32:
		v.Op = OpLOONG64CTZW
		return true
	case OpCtz32NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz64:
		v.Op = OpLOONG64CTZV
		return true
	case OpCtz64NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz8:
		return rewriteValueLOONG64_OpCtz8(v)
	case OpCtz8NonZero:
		v.Op = OpCtz64
		return true
	case OpCvt32Fto32:
		v.Op = OpLOONG64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpLOONG64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpLOONG64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpLOONG64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpLOONG64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpLOONG64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpLOONG64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpLOONG64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpLOONG64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpLOONG64MOVVD
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueLOONG64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueLOONG64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueLOONG64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpLOONG64DIVF
		return true
	case OpDiv32u:
		return rewriteValueLOONG64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueLOONG64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpLOONG64DIVD
		return true
	case OpDiv64u:
		v.Op = OpLOONG64DIVVU
		return true
	case OpDiv8:
		return rewriteValueLOONG64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueLOONG64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueLOONG64_OpEq16(v)
	case OpEq32:
		return rewriteValueLOONG64_OpEq32(v)
	case OpEq32F:
		return rewriteValueLOONG64_OpEq32F(v)
	case OpEq64:
		return rewriteValueLOONG64_OpEq64(v)
	case OpEq64F:
		return rewriteValueLOONG64_OpEq64F(v)
	case OpEq8:
		return rewriteValueLOONG64_OpEq8(v)
	case OpEqB:
		return rewriteValueLOONG64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueLOONG64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpLOONG64FMADDD
		return true
	case OpGetCallerPC:
		v.Op = OpLOONG64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpLOONG64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpLOONG64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueLOONG64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueLOONG64_OpHmul32u(v)
	case OpHmul64:
		v.Op = OpLOONG64MULHV
		return true
	case OpHmul64u:
		v.Op = OpLOONG64MULHVU
		return true
	case OpInterCall:
		v.Op = OpLOONG64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueLOONG64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueLOONG64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueLOONG64_OpIsSliceInBounds(v)
	case OpLOONG64ADDD:
		return rewriteValueLOONG64_OpLOONG64ADDD(v)
	case OpLOONG64ADDF:
		return rewriteValueLOONG64_OpLOONG64ADDF(v)
	case OpLOONG64ADDV:
		return rewriteValueLOONG64_OpLOONG64ADDV(v)
	case OpLOONG64ADDVconst:
		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
	case OpLOONG64AND:
		return rewriteValueLOONG64_OpLOONG64AND(v)
	case OpLOONG64ANDconst:
		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
	case OpLOONG64DIVV:
		return rewriteValueLOONG64_OpLOONG64DIVV(v)
	case OpLOONG64DIVVU:
		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
	case OpLOONG64MASKEQZ:
		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
	case OpLOONG64MASKNEZ:
		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
	case OpLOONG64MOVBUload:
		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
	case OpLOONG64MOVBUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v)
	case OpLOONG64MOVBUreg:
		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
	case OpLOONG64MOVBload:
		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
	case OpLOONG64MOVBloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVBloadidx(v)
	case OpLOONG64MOVBreg:
		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
	case OpLOONG64MOVBstore:
		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
	case OpLOONG64MOVBstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v)
	case OpLOONG64MOVBstorezero:
		return rewriteValueLOONG64_OpLOONG64MOVBstorezero(v)
	case OpLOONG64MOVBstorezeroidx:
		return rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v)
	case OpLOONG64MOVDload:
		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
	case OpLOONG64MOVDloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVDloadidx(v)
	case OpLOONG64MOVDstore:
		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
	case OpLOONG64MOVDstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v)
	case OpLOONG64MOVFload:
		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
	case OpLOONG64MOVFloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVFloadidx(v)
	case OpLOONG64MOVFstore:
		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
	case OpLOONG64MOVFstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v)
	case OpLOONG64MOVHUload:
		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
	case OpLOONG64MOVHUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v)
	case OpLOONG64MOVHUreg:
		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
	case OpLOONG64MOVHload:
		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
	case OpLOONG64MOVHloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVHloadidx(v)
	case OpLOONG64MOVHreg:
		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
	case OpLOONG64MOVHstore:
		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
	case OpLOONG64MOVHstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v)
	case OpLOONG64MOVHstorezero:
		return rewriteValueLOONG64_OpLOONG64MOVHstorezero(v)
	case OpLOONG64MOVHstorezeroidx:
		return rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v)
	case OpLOONG64MOVVload:
		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
	case OpLOONG64MOVVloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVVloadidx(v)
	case OpLOONG64MOVVnop:
		return rewriteValueLOONG64_OpLOONG64MOVVnop(v)
	case OpLOONG64MOVVreg:
		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
	case OpLOONG64MOVVstore:
		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
	case OpLOONG64MOVVstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v)
	case OpLOONG64MOVVstorezero:
		return rewriteValueLOONG64_OpLOONG64MOVVstorezero(v)
	case OpLOONG64MOVVstorezeroidx:
		return rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v)
	case OpLOONG64MOVWUload:
		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
	case OpLOONG64MOVWUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v)
	case OpLOONG64MOVWUreg:
		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
	case OpLOONG64MOVWload:
		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
	case OpLOONG64MOVWloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVWloadidx(v)
	case OpLOONG64MOVWreg:
		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
	case OpLOONG64MOVWstore:
		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
	case OpLOONG64MOVWstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v)
	case OpLOONG64MOVWstorezero:
		return rewriteValueLOONG64_OpLOONG64MOVWstorezero(v)
	case OpLOONG64MOVWstorezeroidx:
		return rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v)
	case OpLOONG64MULV:
		return rewriteValueLOONG64_OpLOONG64MULV(v)
	case OpLOONG64NEGV:
		return rewriteValueLOONG64_OpLOONG64NEGV(v)
	case OpLOONG64NOR:
		return rewriteValueLOONG64_OpLOONG64NOR(v)
	case OpLOONG64NORconst:
		return rewriteValueLOONG64_OpLOONG64NORconst(v)
	case OpLOONG64OR:
		return rewriteValueLOONG64_OpLOONG64OR(v)
	case OpLOONG64ORN:
		return rewriteValueLOONG64_OpLOONG64ORN(v)
	case OpLOONG64ORconst:
		return rewriteValueLOONG64_OpLOONG64ORconst(v)
	case OpLOONG64REMV:
		return rewriteValueLOONG64_OpLOONG64REMV(v)
	case OpLOONG64REMVU:
		return rewriteValueLOONG64_OpLOONG64REMVU(v)
	case OpLOONG64ROTR:
		return rewriteValueLOONG64_OpLOONG64ROTR(v)
	case OpLOONG64ROTRV:
		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
	case OpLOONG64SGT:
		return rewriteValueLOONG64_OpLOONG64SGT(v)
	case OpLOONG64SGTU:
		return rewriteValueLOONG64_OpLOONG64SGTU(v)
	case OpLOONG64SGTUconst:
		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
	case OpLOONG64SGTconst:
		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
	case OpLOONG64SLL:
		return rewriteValueLOONG64_OpLOONG64SLL(v)
	case OpLOONG64SLLV:
		return rewriteValueLOONG64_OpLOONG64SLLV(v)
	case OpLOONG64SLLVconst:
		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
	case OpLOONG64SRA:
		return rewriteValueLOONG64_OpLOONG64SRA(v)
	case OpLOONG64SRAV:
		return rewriteValueLOONG64_OpLOONG64SRAV(v)
	case OpLOONG64SRAVconst:
		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
	case OpLOONG64SRL:
		return rewriteValueLOONG64_OpLOONG64SRL(v)
	case OpLOONG64SRLV:
		return rewriteValueLOONG64_OpLOONG64SRLV(v)
	case OpLOONG64SRLVconst:
		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
	case OpLOONG64SUBD:
		return rewriteValueLOONG64_OpLOONG64SUBD(v)
	case OpLOONG64SUBF:
		return rewriteValueLOONG64_OpLOONG64SUBF(v)
	case OpLOONG64SUBV:
		return rewriteValueLOONG64_OpLOONG64SUBV(v)
	case OpLOONG64SUBVconst:
		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
	case OpLOONG64XOR:
		return rewriteValueLOONG64_OpLOONG64XOR(v)
	case OpLOONG64XORconst:
		return rewriteValueLOONG64_OpLOONG64XORconst(v)
	case OpLeq16:
		return rewriteValueLOONG64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueLOONG64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueLOONG64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueLOONG64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueLOONG64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueLOONG64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueLOONG64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueLOONG64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueLOONG64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueLOONG64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueLOONG64_OpLess16(v)
	case OpLess16U:
		return rewriteValueLOONG64_OpLess16U(v)
	case OpLess32:
		return rewriteValueLOONG64_OpLess32(v)
	case OpLess32F:
		return rewriteValueLOONG64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueLOONG64_OpLess32U(v)
	case OpLess64:
		return rewriteValueLOONG64_OpLess64(v)
	case OpLess64F:
		return rewriteValueLOONG64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueLOONG64_OpLess64U(v)
	case OpLess8:
		return rewriteValueLOONG64_OpLess8(v)
	case OpLess8U:
		return rewriteValueLOONG64_OpLess8U(v)
	case OpLoad:
		return rewriteValueLOONG64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueLOONG64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueLOONG64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueLOONG64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueLOONG64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueLOONG64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueLOONG64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueLOONG64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueLOONG64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueLOONG64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueLOONG64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueLOONG64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueLOONG64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueLOONG64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueLOONG64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueLOONG64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueLOONG64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueLOONG64_OpLsh8x8(v)
	case OpMax32F:
		v.Op = OpLOONG64FMAXF
		return true
	case OpMax64F:
		v.Op = OpLOONG64FMAXD
		return true
	case OpMin32F:
		v.Op = OpLOONG64FMINF
		return true
	case OpMin64F:
		v.Op = OpLOONG64FMIND
		return true
	case OpMod16:
		return rewriteValueLOONG64_OpMod16(v)
	case OpMod16u:
		return rewriteValueLOONG64_OpMod16u(v)
	case OpMod32:
		return rewriteValueLOONG64_OpMod32(v)
	case OpMod32u:
		return rewriteValueLOONG64_OpMod32u(v)
	case OpMod64:
		return rewriteValueLOONG64_OpMod64(v)
	case OpMod64u:
		v.Op = OpLOONG64REMVU
		return true
	case OpMod8:
		return rewriteValueLOONG64_OpMod8(v)
	case OpMod8u:
		return rewriteValueLOONG64_OpMod8u(v)
	case OpMove:
		return rewriteValueLOONG64_OpMove(v)
	case OpMul16:
		v.Op = OpLOONG64MULV
		return true
	case OpMul32:
		v.Op = OpLOONG64MULV
		return true
	case OpMul32F:
		v.Op = OpLOONG64MULF
		return true
	case OpMul64:
		v.Op = OpLOONG64MULV
		return true
	case OpMul64F:
		v.Op = OpLOONG64MULD
		return true
	case OpMul8:
		v.Op = OpLOONG64MULV
		return true
	case OpNeg16:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg32:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg32F:
		v.Op = OpLOONG64NEGF
		return true
	case OpNeg64:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg64F:
		v.Op = OpLOONG64NEGD
		return true
	case OpNeg8:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeq16:
		return rewriteValueLOONG64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueLOONG64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueLOONG64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueLOONG64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueLOONG64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueLOONG64_OpNeq8(v)
	case OpNeqB:
		v.Op = OpLOONG64XOR
		return true
	case OpNeqPtr:
		return rewriteValueLOONG64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpLOONG64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueLOONG64_OpNot(v)
	case OpOffPtr:
		return rewriteValueLOONG64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpLOONG64OR
		return true
	case OpOr32:
		v.Op = OpLOONG64OR
		return true
	case OpOr64:
		v.Op = OpLOONG64OR
		return true
	case OpOr8:
		v.Op = OpLOONG64OR
		return true
	case OpOrB:
		v.Op = OpLOONG64OR
		return true
	case OpPanicBounds:
		return rewriteValueLOONG64_OpPanicBounds(v)
	case OpPopCount16:
		return rewriteValueLOONG64_OpPopCount16(v)
	case OpPopCount32:
		return rewriteValueLOONG64_OpPopCount32(v)
	case OpPopCount64:
		return rewriteValueLOONG64_OpPopCount64(v)
	case OpPrefetchCache:
		return rewriteValueLOONG64_OpPrefetchCache(v)
	case OpPrefetchCacheStreamed:
		return rewriteValueLOONG64_OpPrefetchCacheStreamed(v)
	case OpPubBarrier:
		v.Op = OpLOONG64LoweredPubBarrier
		return true
	case OpRotateLeft16:
		return rewriteValueLOONG64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueLOONG64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueLOONG64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueLOONG64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpLOONG64LoweredRound32F
		return true
	case OpRound64F:
		v.Op = OpLOONG64LoweredRound64F
		return true
	case OpRsh16Ux16:
		return rewriteValueLOONG64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueLOONG64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueLOONG64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueLOONG64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueLOONG64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueLOONG64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueLOONG64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueLOONG64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueLOONG64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueLOONG64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueLOONG64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueLOONG64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueLOONG64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueLOONG64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueLOONG64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueLOONG64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueLOONG64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueLOONG64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueLOONG64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueLOONG64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueLOONG64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueLOONG64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueLOONG64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueLOONG64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueLOONG64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueLOONG64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueLOONG64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueLOONG64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueLOONG64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueLOONG64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueLOONG64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueLOONG64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueLOONG64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueLOONG64_OpSelect1(v)
	case OpSelectN:
		return rewriteValueLOONG64_OpSelectN(v)
	case OpSignExt16to32:
		v.Op = OpLOONG64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpLOONG64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpLOONG64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueLOONG64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpLOONG64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpLOONG64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpLOONG64CALLstatic
		return true
	case OpStore:
		return rewriteValueLOONG64_OpStore(v)
	case OpSub16:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub32:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub32F:
		v.Op = OpLOONG64SUBF
		return true
	case OpSub64:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub64F:
		v.Op = OpLOONG64SUBD
		return true
	case OpSub8:
		v.Op = OpLOONG64SUBV
		return true
	case OpSubPtr:
		v.Op = OpLOONG64SUBV
		return true
	case OpTailCall:
		v.Op = OpLOONG64CALLtail
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpLOONG64LoweredWB
		return true
	case OpXor16:
		v.Op = OpLOONG64XOR
		return true
	case OpXor32:
		v.Op = OpLOONG64XOR
		return true
	case OpXor64:
		v.Op = OpLOONG64XOR
		return true
	case OpXor8:
		v.Op = OpLOONG64XOR
		return true
	case OpZero:
		return rewriteValueLOONG64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpLOONG64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpLOONG64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpLOONG64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpLOONG64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpLOONG64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpLOONG64MOVBUreg
		return true
	}
	return false
}
   881  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   882  	v_0 := v.Args[0]
   883  	// match: (Addr {sym} base)
   884  	// result: (MOVVaddr {sym} base)
   885  	for {
   886  		sym := auxToSym(v.Aux)
   887  		base := v_0
   888  		v.reset(OpLOONG64MOVVaddr)
   889  		v.Aux = symToAux(sym)
   890  		v.AddArg(base)
   891  		return true
   892  	}
   893  }
// rewriteValueLOONG64_OpAtomicAnd8 lowers a byte-wide atomic AND to a 32-bit
// atomic AND on the aligned word containing the byte: the address is rounded
// down (ptr &^ 3), and the operand is the byte value shifted into its lane
// with all other lanes set to 1 so they are left unchanged by the AND.
func rewriteValueLOONG64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (LoweredAtomicAnd32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (NORconst [0] <typ.UInt32> (SLLV <typ.UInt32> (XORconst <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicAnd32)
		// v0 = word-aligned address: ptr & ^3
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		// v2 = ^((^val & 0xff) << shift): val in its byte lane, 1s elsewhere
		v2 := b.NewValue0(v.Pos, OpLOONG64NORconst, typ.UInt32)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpLOONG64XORconst, typ.UInt32)
		v4.AuxInt = int64ToAuxInt(0xff)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v5.AddArg(val)
		v4.AddArg(v5)
		// v6 = bit offset of the byte within its word: (ptr & 3) << 3
		v6 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v7 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v3.AddArg2(v4, v6)
		v2.AddArg(v3)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to
// LoweredAtomicCas32, sign-extending the old value to 64 bits first.
func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant lowers the Variant form
// of a 32-bit CAS to LoweredAtomicCas32Variant, sign-extending the old value.
func rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32Variant ptr old new mem)
	// result: (LoweredAtomicCas32Variant ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32Variant)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicOr8 lowers a byte-wide atomic OR to a 32-bit
// atomic OR on the aligned containing word: the address is rounded down
// (ptr &^ 3) and the byte value is shifted into its lane (other lanes are 0,
// so OR leaves them unchanged).
func rewriteValueLOONG64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicOr32)
		// v0 = word-aligned address: ptr & ^3
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		// v2 = zero-extended val shifted to the byte's bit offset ((ptr&3)<<3)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAvg64u lowers unsigned 64-bit average to
// ((x - y) >> 1) + y, which avoids overflow of the intermediate sum.
func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen16 reduces BitLen16 to BitLen64 on the
// zero-extended operand (extra high zero bits do not change the bit length).
func rewriteValueLOONG64_OpBitLen16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen16 x)
	// result: (BitLen64 (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen32 lowers BitLen32 via the count-leading-zeros
// instruction: bitlen = 32 - CLZW(x), emitted as -(CLZW(x) - 32).
func rewriteValueLOONG64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen32 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [32] (CLZW <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZW, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen64 lowers BitLen64 via the count-leading-zeros
// instruction: bitlen = 64 - CLZV(x), emitted as -(CLZV(x) - 64).
func rewriteValueLOONG64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen64 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [64] (CLZV <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZV, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen8 reduces BitLen8 to BitLen64 on the
// zero-extended operand (extra high zero bits do not change the bit length).
func rewriteValueLOONG64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitRev16 lowers a 16-bit bit reversal to reversing
// the bits within each byte (BITREV4B) and then swapping the two bytes
// (REVB2H).
func rewriteValueLOONG64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitRev16 <t> x)
	// result: (REVB2H (BITREV4B <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64REVB2H)
		v0 := b.NewValue0(v.Pos, OpLOONG64BITREV4B, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpCom16 lowers 16-bit bitwise complement to
// NOR(0, x) == ^x.
func rewriteValueLOONG64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom32 lowers 32-bit bitwise complement to
// NOR(0, x) == ^x.
func rewriteValueLOONG64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom64 lowers 64-bit bitwise complement to
// NOR(0, x) == ^x.
func rewriteValueLOONG64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCom8 lowers 8-bit bitwise complement to
// NOR(0, x) == ^x.
func rewriteValueLOONG64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCondSelect lowers a conditional select to branchless
// masking: (x masked when cond != 0) OR (y masked when cond == 0).
func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect <t> x y cond)
	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
	for {
		t := v.Type
		x := v_0
		y := v_1
		cond := v_2
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
		v0.AddArg2(x, cond)
		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
		v1.AddArg2(y, cond)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpConst16 lowers a 16-bit constant to a 64-bit
// MOVVconst carrying the sign-extended value.
func rewriteValueLOONG64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst32 lowers a 32-bit constant to a 64-bit
// MOVVconst carrying the sign-extended value.
func rewriteValueLOONG64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst32F lowers a float32 constant to MOVFconst,
// widening the aux value to float64.
func rewriteValueLOONG64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpLOONG64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst64 lowers a 64-bit constant to MOVVconst.
func rewriteValueLOONG64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst64F lowers a float64 constant to MOVDconst.
func rewriteValueLOONG64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpLOONG64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConst8 lowers an 8-bit constant to a 64-bit
// MOVVconst carrying the sign-extended value.
func rewriteValueLOONG64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueLOONG64_OpConstBool lowers a boolean constant to MOVVconst
// with value 0 or 1 (via b2i).
func rewriteValueLOONG64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}
// rewriteValueLOONG64_OpConstNil lowers the nil-pointer constant to
// MOVVconst [0].
func rewriteValueLOONG64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueLOONG64_OpCtz16 lowers a 16-bit count-trailing-zeros to the
// 64-bit CTZV; ORing in bit 16 caps the result at 16 when x is zero.
func rewriteValueLOONG64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<16])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpCtz8 lowers an 8-bit count-trailing-zeros to the
// 64-bit CTZV; ORing in bit 8 caps the result at 8 when x is zero.
func rewriteValueLOONG64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<8])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpDiv16 lowers signed 16-bit division to the 64-bit
// DIVV on sign-extended operands.
func rewriteValueLOONG64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv16u lowers unsigned 16-bit division to the 64-bit
// DIVVU on zero-extended operands.
func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv32 lowers signed 32-bit division to the 64-bit
// DIVV on sign-extended operands.
func rewriteValueLOONG64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv32u lowers unsigned 32-bit division to the 64-bit
// DIVVU on zero-extended operands.
func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv64 lowers signed 64-bit division directly to DIVV
// (no extension needed at full width).
func rewriteValueLOONG64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y)
	// result: (DIVV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueLOONG64_OpDiv8 lowers signed 8-bit division to the 64-bit
// DIVV on sign-extended operands.
func rewriteValueLOONG64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpDiv8u lowers unsigned 8-bit division to the 64-bit
// DIVVU on zero-extended operands.
func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq16 lowers 16-bit equality to an unsigned compare:
// x == y  iff  (zext(x) ^ zext(y)) < 1, i.e. SGTU(1, XOR(...)).
func rewriteValueLOONG64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq32 lowers 32-bit equality to an unsigned compare:
// x == y  iff  (zext(x) ^ zext(y)) < 1, i.e. SGTU(1, XOR(...)).
func rewriteValueLOONG64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq32F lowers float32 equality to the FP compare
// instruction CMPEQF followed by reading the FP flag.
func rewriteValueLOONG64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpEq64 lowers 64-bit equality to an unsigned compare:
// x == y  iff  (x ^ y) < 1, i.e. SGTU(1, XOR(x, y)).
func rewriteValueLOONG64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq64F lowers float64 equality to the FP compare
// instruction CMPEQD followed by reading the FP flag.
func rewriteValueLOONG64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpEq8 lowers 8-bit equality to an unsigned compare:
// x == y  iff  (zext(x) ^ zext(y)) < 1, i.e. SGTU(1, XOR(...)).
func rewriteValueLOONG64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEqB lowers boolean equality: since booleans are 0/1,
// x == y is computed as 1 ^ (x ^ y).
func rewriteValueLOONG64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEqPtr lowers pointer equality to an unsigned compare:
// x == y  iff  (x ^ y) < 1, i.e. SGTU(1, XOR(x, y)).
func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpHmul32 computes the high 32 bits of a signed 32x32
// multiply by doing a full 64-bit multiply of sign-extended operands and
// arithmetic-shifting right by 32.
func rewriteValueLOONG64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAVconst (MULV (SignExt32to64 x) (SignExt32to64 y)) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpHmul32u computes the high 32 bits of an unsigned
// 32x32 multiply by doing a full 64-bit multiply of zero-extended operands
// and logical-shifting right by 32.
func rewriteValueLOONG64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLVconst (MULV (ZeroExt32to64 x) (ZeroExt32to64 y)) [32])
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpIsInBounds lowers the bounds check idx < len to the
// unsigned set-on-greater-than instruction: SGTU len idx.
func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}
// rewriteValueLOONG64_OpIsNonNil lowers the nil check to the unsigned
// compare ptr > 0.
func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}
// rewriteValueLOONG64_OpIsSliceInBounds lowers the slice bounds check
// idx <= len as the negation of idx > len: 1 ^ SGTU(idx, len).
func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLOONG64ADDD fuses float64 add with a multiply into
// FMA forms when the function allows FMA: (MULD x y) + z -> FMADDD, and
// z + -(MULD x y) -> FNMSUBD. The inner loop tries both operand orders
// (addition is commutative).
func rewriteValueLOONG64_OpLOONG64ADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGD {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDF fuses float32 add with a multiply into
// FMA forms when the function allows FMA: (MULF x y) + z -> FMADDF, and
// z + -(MULF x y) -> FNMSUBF. The inner loop tries both operand orders
// (addition is commutative).
func rewriteValueLOONG64_OpLOONG64ADDF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGF {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDV simplifies 64-bit adds: folds a 32-bit
// constant operand into ADDVconst (skipping pointer-typed constants), and
// turns x + (-y) into x - y. Both rules try the two operand orders.
func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpLOONG64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDVconst simplifies add-with-constant:
// folds the offset into a MOVVaddr, drops adds of 0, constant-folds against
// MOVVconst, and merges chained ADDVconst/SUBVconst when the combined
// offset still fits in 32 bits.
func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64AND simplifies bitwise AND: folds a 32-bit
// constant operand into ANDconst, collapses x & x to x, and turns
// x & ^y (NORconst [0] y) into the ANDN instruction. Commutative rules try
// both operand orders.
func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (NORconst [0] y))
	// result: (ANDN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ANDN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ANDconst applies the generated rewrite rules
// for (ANDconst [c] x): the annihilator c==0, the identity c==-1, constant
// folding against MOVVconst, and merging of nested ANDconst masks.
// Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVV applies the generated rewrite rule for
// signed division: fold (DIVV (MOVVconst [c]) (MOVVconst [d])) to the
// constant c/d, guarded by d != 0 so compile-time folding never divides by
// zero. Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVVU applies the generated rewrite rules for
// unsigned division: divide-by-one identity, strength-reduction of a
// power-of-two divisor to a logical right shift, and full constant folding
// (computed in uint64 to get unsigned semantics). Reports whether v was
// rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVVU x (MOVVconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (DIVVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKEQZ applies the generated rewrite rules
// for MASKEQZ: a zero first operand yields zero, and a constant condition
// operand resolves the mask statically (zero condition -> 0, nonzero
// condition -> the value itself). Reports whether v was rewritten.
// Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MASKEQZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c == 0
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c == 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c != 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c != 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKNEZ applies the generated rewrite rule
// for MASKNEZ: masking a constant zero yields zero regardless of the
// condition. Reports whether v was rewritten. Generated code — do not
// hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MASKNEZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUload applies the generated rewrite rules
// for unsigned byte loads: fold an ADDVconst address into the load offset,
// merge a MOVVaddr symbol+offset into the load's aux, and convert an
// offset-free ADDV address into the indexed form MOVBUloadidx. Offset folds
// are guarded by is32Bit and skipped for SB-based addresses under dynamic
// linking. Reports whether v was rewritten. Generated code — do not
// hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUloadidx applies the generated rewrite
// rules for indexed unsigned byte loads: when either address operand is a
// MOVVconst that fits in 32 bits, convert back to the offset form MOVBUload.
// Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUreg applies the generated rewrite rules
// for zero-extending a byte: combine a right shift into BSTRPICKV, drop the
// extension when the operand is already 0/1-valued (SGT/SGTU and their
// XOR-with-1 negations) or already byte-zero-extended, fold constants, and
// narrow an ANDconst mask to the low byte. Reports whether v was rewritten.
// Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg (SRLVconst [rc] x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + (7+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		// auxint packs two bit positions: rc in the low 6 bits, rc+7 shifted above them.
		v.AuxInt = int64ToAuxInt(rc + (7+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(SGT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SGTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGT _ _)))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		// XOR is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGT {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGTU _ _)))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		// XOR is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGTU {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLVconst [lc] x))
	// cond: lc >= 8
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 8) {
			break
		}
		// Shifting left by >= 8 clears the low byte, so the zero-extension is 0.
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&0xff] x)
	for {
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & 0xff)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBload applies the generated rewrite rules
// for signed byte loads: fold an ADDVconst address into the load offset,
// merge a MOVVaddr symbol+offset into the load's aux, and convert an
// offset-free ADDV address into the indexed form MOVBloadidx. Offset folds
// are guarded by is32Bit and skipped for SB-based addresses under dynamic
// linking. Reports whether v was rewritten. Generated code — do not
// hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBloadidx applies the generated rewrite
// rules for indexed signed byte loads: when either address operand is a
// MOVVconst that fits in 32 bits, convert back to the offset form MOVBload.
// Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBreg applies the generated rewrite rules
// for sign-extending a byte: drop the extension when the operand is already
// a sign-extended byte load or extension, and fold constants through int8
// truncation. Reports whether v was rewritten. Generated code — do not
// hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstore applies the generated rewrite rules
// for byte stores: fold ADDVconst/MOVVaddr addresses into the store's
// offset/aux, strip redundant extension ops from the stored value (only the
// low byte is written), use MOVBstorezero when storing constant zero, and
// convert an offset-free ADDV address into the indexed form MOVBstoreidx.
// Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstoreidx applies the generated rewrite
// rules for indexed byte stores: when either address operand is a 32-bit
// MOVVconst, convert back to the offset form MOVBstore; and when the stored
// value is constant zero, use MOVBstorezeroidx. Reports whether v was
// rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVBstoreidx ptr idx (MOVVconst [0]) mem)
	// result: (MOVBstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpLOONG64MOVBstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstorezero applies the generated rewrite
// rules for zero byte stores: fold ADDVconst/MOVVaddr addresses into the
// offset/aux (guarded by is32Bit and the SB/dynlink restriction), and
// convert an offset-free ADDV address into the indexed form
// MOVBstorezeroidx. Reports whether v was rewritten. Generated code — do
// not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx applies the generated
// rewrite rules for indexed zero byte stores: when either address operand is
// a 32-bit MOVVconst, convert back to the offset form MOVBstorezero.
// Reports whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezeroidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezeroidx (MOVVconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVBstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDload applies the generated rewrite rules
// for float64 loads: forward a just-stored integer value through MOVVgpfp
// (store-to-load forwarding at the same [off]{sym} address), fold
// ADDVconst/MOVVaddr addresses into the offset/aux, and convert an
// offset-free ADDV address into the indexed form MOVDloadidx. Reports
// whether v was rewritten. Generated code — do not hand-edit logic.
func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDloadidx rewrites an indexed MOVDloadidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVDload.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstore rewrites MOVDstore:
//   - a stored MOVVgpfp value is stored directly from the GP register
//     as a MOVVstore, eliminating the gp->fp move;
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the store;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVDstoreidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
	// result: (MOVVstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstoreidx rewrites an indexed MOVDstoreidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVDstore.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFload rewrites MOVFload:
//   - a load that reads back the value just written by a MOVWstore at the
//     same [off] {sym} ptr is replaced by a MOVWgpfp move of the stored
//     value, avoiding the memory round trip;
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the load;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVFloadidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (MOVWgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFloadidx rewrites an indexed MOVFloadidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVFload.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstore rewrites MOVFstore:
//   - a stored MOVWgpfp value is stored directly from the GP register
//     as a MOVWstore, eliminating the gp->fp move;
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the store;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVFstoreidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstoreidx rewrites an indexed MOVFstoreidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVFstore.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUload rewrites MOVHUload:
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the load;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVHUloadidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUloadidx rewrites an indexed MOVHUloadidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVHUload.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUreg simplifies the 16-bit zero-extension
// MOVHUreg:
//   - a right shift by rc < 16 combines with the extension into a single
//     BSTRPICKV bit-field extract (AuxInt packs lsb and msb: rc + (15+rc)<<6);
//   - values already zero-extended to <= 16 bits (MOVBUload, MOVHUload,
//     MOVBUreg, MOVHUreg) only need a register move (MOVVreg);
//   - a left shift by >= 16 leaves no low 16 bits, so the result is
//     constant zero;
//   - a constant operand is folded to its low 16 bits, zero-extended.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg (SRLVconst [rc] x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + (15+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + (15+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SLLVconst [lc] x))
	// cond: lc >= 16
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHload rewrites MOVHload:
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the load;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVHloadidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHloadidx rewrites an indexed MOVHloadidx:
// when either address operand is a MOVVconst that fits in 32 bits, the
// constant is folded into the AuxInt offset of a plain MOVHload.
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVHload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHreg simplifies the 16-bit sign-extension
// MOVHreg:
//   - values whose upper bits are already correct for a 16-bit signed
//     result (MOVBload, MOVBUload, MOVHload, MOVBreg, MOVBUreg, MOVHreg)
//     only need a register move (MOVVreg);
//   - a constant operand is folded to int64(int16(c)), i.e. sign-extended
//     from its low 16 bits.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstore rewrites MOVHstore:
//   - an ADDVconst base has its offset folded into AuxInt (when the sum
//     stays in 32 bits and the base is not SB under dynamic linking);
//   - a MOVVaddr base has its offset and symbol merged into the store;
//   - a MOVHreg/MOVHUreg/MOVWreg/MOVWUreg wrapper on the stored value is
//     dropped, since the store only writes the low 16 bits anyway;
//   - storing constant zero becomes MOVHstorezero;
//   - an (ADDV ptr idx) base with zero offset and nil symbol becomes the
//     indexed MOVHstoreidx form.
//
// It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
  3968  func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
  3969  	v_3 := v.Args[3]
  3970  	v_2 := v.Args[2]
  3971  	v_1 := v.Args[1]
  3972  	v_0 := v.Args[0]
  3973  	// match: (MOVHstoreidx ptr (MOVVconst [c]) val mem)
  3974  	// cond: is32Bit(c)
  3975  	// result: (MOVHstore [int32(c)] ptr val mem)
  3976  	for {
  3977  		ptr := v_0
  3978  		if v_1.Op != OpLOONG64MOVVconst {
  3979  			break
  3980  		}
  3981  		c := auxIntToInt64(v_1.AuxInt)
  3982  		val := v_2
  3983  		mem := v_3
  3984  		if !(is32Bit(c)) {
  3985  			break
  3986  		}
  3987  		v.reset(OpLOONG64MOVHstore)
  3988  		v.AuxInt = int32ToAuxInt(int32(c))
  3989  		v.AddArg3(ptr, val, mem)
  3990  		return true
  3991  	}
  3992  	// match: (MOVHstoreidx (MOVVconst [c]) idx val mem)
  3993  	// cond: is32Bit(c)
  3994  	// result: (MOVHstore [int32(c)] idx val mem)
  3995  	for {
  3996  		if v_0.Op != OpLOONG64MOVVconst {
  3997  			break
  3998  		}
  3999  		c := auxIntToInt64(v_0.AuxInt)
  4000  		idx := v_1
  4001  		val := v_2
  4002  		mem := v_3
  4003  		if !(is32Bit(c)) {
  4004  			break
  4005  		}
  4006  		v.reset(OpLOONG64MOVHstore)
  4007  		v.AuxInt = int32ToAuxInt(int32(c))
  4008  		v.AddArg3(idx, val, mem)
  4009  		return true
  4010  	}
  4011  	// match: (MOVHstoreidx ptr idx (MOVVconst [0]) mem)
  4012  	// result: (MOVHstorezeroidx ptr idx mem)
  4013  	for {
  4014  		ptr := v_0
  4015  		idx := v_1
  4016  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  4017  			break
  4018  		}
  4019  		mem := v_3
  4020  		v.reset(OpLOONG64MOVHstorezeroidx)
  4021  		v.AddArg3(ptr, idx, mem)
  4022  		return true
  4023  	}
  4024  	return false
  4025  }
// rewriteValueLOONG64_OpLOONG64MOVHstorezero applies the rewrite rules
// generated from _gen/LOONG64.rules for MOVHstorezero (16-bit zero store):
// fold ADDVconst/MOVVaddr offsets into the store's offset/symbol, and convert
// an ADDV address into the indexed form. Rules are tried in source order; the
// first match mutates v in place and returns true. Regenerate; do not hand-edit.
func rewriteValueLOONG64_OpLOONG64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx applies the rewrite rules
// generated from _gen/LOONG64.rules for MOVHstorezeroidx: when either address
// operand is a constant that fits in 32 bits, fold it into the offset of a
// plain MOVHstorezero. First matching rule rewrites v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezeroidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezeroidx (MOVVconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVHstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVload applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVVload (64-bit load): forward a just-stored
// FP value via MOVVfpgp, fold ADDVconst/MOVVaddr offsets into the load, and
// convert an ADDV address into the indexed form. Rules are tried in source
// order; the first match mutates v in place and returns true.
func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVloadidx applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVVloadidx: when either address operand is a
// constant fitting in 32 bits, fold it into the offset of a plain MOVVload.
// First matching rule rewrites v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVVloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVVload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVVload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVnop applies the single generated rewrite
// rule for MOVVnop: a no-op move of a constant collapses to the constant
// itself. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVnop (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVreg applies the generated rewrite rules for
// MOVVreg: a single-use operand degrades to MOVVnop, and a constant operand
// collapses to the constant. Rules are tried in source order; the first match
// rewrites v in place and returns true.
func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpLOONG64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// result: (MOVVconst [c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstore applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVVstore (64-bit store): route an FP->GP moved
// value to MOVDstore, fold ADDVconst/MOVVaddr offsets, recognize a stored
// constant zero (MOVVstorezero), and convert an ADDV address into the indexed
// form. Rules are tried in source order; first match rewrites v, returns true.
func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstoreidx applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVVstoreidx (indexed 64-bit store): fold a
// 32-bit-constant address operand into the store offset, and turn a stored
// constant zero into MOVVstorezeroidx. First matching rule rewrites v and
// returns true.
func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVVstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVVstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVVstoreidx ptr idx (MOVVconst [0]) mem)
	// result: (MOVVstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpLOONG64MOVVstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstorezero applies the rewrite rules
// generated from _gen/LOONG64.rules for MOVVstorezero (64-bit zero store):
// fold ADDVconst/MOVVaddr offsets into the store's offset/symbol, and convert
// an ADDV address into the indexed form. First matching rule rewrites v and
// returns true. Regenerate via 'go generate'; do not hand-edit.
func rewriteValueLOONG64_OpLOONG64MOVVstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezero [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx applies the rewrite rules
// generated from _gen/LOONG64.rules for MOVVstorezeroidx: fold a
// 32-bit-constant address operand into the offset of a plain MOVVstorezero.
// First matching rule rewrites v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVVstorezeroidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVVstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVstorezeroidx (MOVVconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVVstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVVstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUload applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVWUload (32-bit zero-extending load): forward
// a just-stored float via MOVWfpgp + zero-extension, fold ADDVconst/MOVVaddr
// offsets into the load, and convert an ADDV address into the indexed form.
// Rules are tried in source order; first match rewrites v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v_1.Pos, OpLOONG64MOVWfpgp, typ.Float32)
		v0.AddArg(val)
		v.AddArg(v0)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUloadidx applies the rewrite rules
// generated from _gen/LOONG64.rules for MOVWUloadidx: fold a 32-bit-constant
// address operand into the offset of a plain MOVWUload. First matching rule
// rewrites v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUreg applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVWUreg (zero-extend low 32 bits): combine a
// right shift into a single BSTRPICKV bit-extract, drop the extension when the
// operand is already zero-extended (unsigned loads / narrower unsigned regs),
// and fold constants. Rules are tried in source order; first match rewrites v
// and returns true.
func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg (SRLVconst [rc] x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + (31+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + (31+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (SLLVconst [lc] x))
	// cond: lc >= 32
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWload applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVWload (32-bit sign-extending load): fold
// ADDVconst/MOVVaddr offsets into the load's offset/symbol, and convert an
// ADDV address into the indexed form. Rules are tried in source order; first
// match rewrites v in place and returns true.
func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWloadidx applies the rewrite rules generated
// from _gen/LOONG64.rules for MOVWloadidx: fold a 32-bit-constant address
// operand into the offset of a plain MOVWload. First matching rule rewrites v
// and returns true.
func rewriteValueLOONG64_OpLOONG64MOVWloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWreg simplifies a sign-extending MOVWreg
// whose argument is already known to fit in 32 signed bits (a narrower load
// or a narrower sign/zero extension) down to a register move, and folds
// MOVWreg of a constant into the sign-extended constant. It reports whether
// v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstore optimizes 32-bit stores: it turns a
// store of an FP-to-GP move into an FP store, folds constant and symbolic
// address offsets into the store's aux fields, drops redundant sign/zero
// extensions of the stored value, converts stores of constant zero into
// MOVWstorezero, and lowers a store through an ADDV address into the indexed
// form. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
	// result: (MOVFstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstoreidx folds a 32-bit constant pointer
// or index operand of a MOVWstoreidx into the immediate offset of a plain
// MOVWstore, and converts an indexed store of constant zero into
// MOVWstorezeroidx. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVVconst [0]) mem)
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		ptr := v_0
		idx := v_1
		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
			break
		}
		mem := v_3
		v.reset(OpLOONG64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstorezero folds constant and symbolic
// address offsets into a MOVWstorezero's aux fields, and lowers a zero store
// through an ADDV address into the indexed MOVWstorezeroidx form. It reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstorezeroidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx folds a 32-bit constant
// pointer or index operand of a MOVWstorezeroidx into the immediate offset of
// a plain MOVWstorezero. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezeroidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezeroidx (MOVVconst [c]) idx mem)
	// cond: is32Bit(c)
	// result: (MOVWstorezero [int32(c)] idx mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(idx, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MULV strength-reduces 64-bit multiplies:
// multiply by -1 becomes negate, by 0 becomes the constant 0, by 1 becomes
// the operand itself, by a power of two becomes a shift, and a product of
// two constants is folded. Each rule's inner loop tries both operand orders
// since MULV is commutative. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MULV x (MOVVconst [-1]))
	// result: (NEGV x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
				continue
			}
			v.reset(OpLOONG64NEGV)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULV _ (MOVVconst [0]))
	// result: (MOVVconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log64(c)] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(isPowerOfTwo(c)) {
				continue
			}
			v.reset(OpLOONG64SLLVconst)
			v.AuxInt = int64ToAuxInt(log64(c))
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
	// result: (MOVVconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NEGV simplifies 64-bit negation: -(x-y)
// becomes y-x, a negated single-use (x-y)+c becomes (y-x)-c, double negation
// cancels, a negated single-use (-x)+c becomes x-c, and negation of a
// constant is folded. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)
	for {
		if v_0.Op != OpLOONG64SUBV {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SUBV)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (SUBV x y)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] (SUBV <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64SUBV {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEGV (NEGV x))
	// result: x
	for {
		if v_0.Op != OpLOONG64NEGV {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (NEGV x)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] x)
	for {
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64NEGV {
			break
		}
		x := s_0.Args[0]
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(x)
		return true
	}
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NOR folds a 32-bit constant operand of NOR
// into the immediate form NORconst, trying both operand orders since NOR is
// commutative. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NORconst constant-folds NORconst applied to a
// constant: NORconst [c] (MOVVconst [d]) becomes the constant ^(c|d). It
// reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64OR simplifies bitwise OR: a 32-bit constant
// operand is folded into ORconst, x|x collapses to x, and x | ^y (expressed
// as NORconst [0] y) becomes the ORN instruction. Commutative rules try both
// operand orders. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (OR x (NORconst [0] y))
	// result: (ORN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ORN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORN simplifies ORN (or-not): x ORN -1 is
// x | ^(-1) = x, so the value collapses to its first operand. It reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x (MOVVconst [-1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORconst simplifies ORconst: OR with 0 is the
// identity, OR with -1 is the constant -1, OR of two constants folds, and
// nested ORconst values merge when the combined immediate still fits in 32
// bits. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpLOONG64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMV constant-folds a signed 64-bit remainder
// of two constants; the d != 0 guard avoids folding a division by zero. It
// reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMVU simplifies unsigned 64-bit remainder:
// x % 1 is 0, x % 2^k becomes a mask with 2^k-1, and a remainder of two
// constants folds (guarded against a zero divisor). It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMVU _ (MOVVconst [1]))
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (REMVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTR folds a constant rotate amount into the
// immediate form ROTRconst, masking the amount to the 32-bit rotate range
// (c&31). It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTR x (MOVVconst [c]))
	// result: (ROTRconst x [c&31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRconst)
		v.AuxInt = int64ToAuxInt(c & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTRV folds a constant rotate amount into the
// immediate form ROTRVconst, masking the amount to the 64-bit rotate range
// (c&63). It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTRV x (MOVVconst [c]))
	// result: (ROTRVconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRVconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGT simplifies the signed set-greater-than
// op: c > -(x-d) is re-expressed as x > d-c when d-c fits in 32 bits, a
// constant first operand is folded into the immediate form SGTconst, and
// x > x is the constant 0. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SGT (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// cond: is32Bit(d-c)
	// result: (SGT x (MOVVconst [d-c]))
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		if !(is32Bit(d - c)) {
			break
		}
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(d - c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGT x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTU simplifies the unsigned
// set-greater-than op: a constant first operand is folded into the immediate
// form SGTUconst, and x > x is the constant 0. It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGTU x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTUconst evaluates SGTUconst (unsigned
// c > x) at compile time where possible: against another constant directly,
// and against operands whose unsigned range is bounded above — zero
// extensions (max 0xff / 0xffff), AND with a constant mask (max m), and a
// right shift by d (max 2^64-1 >> d). It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTconst rewrites (SGTconst [c] x), which
// yields 1 when the signed comparison c > x holds and 0 otherwise. Each
// rule folds the comparison to a constant when x's value — or the value
// range implied by a sign/zero extension, an ANDconst mask, or an
// unsigned right shift — lies provably on one side of c.
func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A sign-extended byte is in [-0x80, 0x7f]; any c outside that range
	// decides the comparison without inspecting the value.
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A zero-extended byte is in [0, 0xff].
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A sign-extended halfword is in [-0x8000, 0x7fff].
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A zero-extended halfword is in [0, 0xffff].
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// A zero-extended word is non-negative, so any negative c loses.
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// An AND with a non-negative mask m yields a value in [0, m].
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// A logical right shift by d leaves at most 64-d significant bits, so
	// its value is bounded above by 2^64-1 >> d.
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLL strength-reduces the 32-bit variable
// shift-left: a constant count >= 32 zeroes the result, a constant count in
// [0, 31] becomes SLLconst, and an explicit (ANDconst [31]) mask on a
// variable count is dropped (the rule assumes the instruction itself only
// consumes the low 5 bits of the count — generated from _gen/LOONG64.rules).
func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SLLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		// uint64(c) >= 0 is vacuously true; the effective condition is
		// uint64(c) <= 31 (kept as generated from the rules file).
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SLLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLL x (ANDconst [31] y))
	// result: (SLL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLV strength-reduces the 64-bit variable
// shift-left: a constant count >= 64 zeroes the result, any remaining
// constant count becomes SLLVconst, and an explicit (ANDconst [63]) mask on
// a variable count is dropped (the rule assumes the instruction only
// consumes the low 6 bits of the count).
func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	// Rule order matters: the preceding rule already consumed
	// uint64(c) >= 64, so here c is in [0, 63].
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLLV x (ANDconst [63] y))
	// result: (SLLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLVconst constant-folds a 64-bit left shift
// of a constant operand into a single MOVVconst.
func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRA strength-reduces the 32-bit arithmetic
// right shift: a constant count >= 32 saturates to SRAconst [31] (the sign
// bit fills the result rather than zeroing it), a constant count in [0, 31]
// becomes SRAconst, and an explicit (ANDconst [31]) mask on a variable
// count is dropped.
func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (SRAconst x [31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRAconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (ANDconst [31] y))
	// result: (SRA x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAV strength-reduces the 64-bit arithmetic
// right shift: a constant count >= 64 saturates to SRAVconst [63], any
// remaining constant count becomes SRAVconst, and an explicit
// (ANDconst [63]) mask on a variable count is dropped.
func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	// Rule order matters: the preceding rule already consumed
	// uint64(c) >= 64, so here c is in [0, 63].
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (ANDconst [63] y))
	// result: (SRAV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAVconst simplifies a 64-bit arithmetic
// right shift by a constant when the operand is a narrowing sign extension
// or a constant: small shifts of a sign-extended word use the 32-bit
// SRAconst; shifts that consume all data bits of a sign-extended byte or
// halfword reduce to broadcasting the sign bit (shift left to bit 63, then
// arithmetic shift right by 63); shifts >= 32 of a sign-extended word
// saturate to SRAconst [31]; constant operands fold completely.
func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAVconst [rc] (MOVWreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRAconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// Shifting a sign-extended byte right by >= 8 leaves only copies of
	// its sign bit; SLLVconst [56] moves that sign bit to bit 63 and
	// SRAVconst [63] smears it across the whole register.
	// match: (SRAVconst <t> [rc] (MOVBreg y))
	// cond: rc >= 8
	// result: (SRAVconst [63] (SLLVconst <t> [56] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 8) {
			break
		}
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// Same as above for a sign-extended halfword (16 data bits).
	// match: (SRAVconst <t> [rc] (MOVHreg y))
	// cond: rc >= 16
	// result: (SRAVconst [63] (SLLVconst <t> [48] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 16) {
			break
		}
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVWreg y))
	// cond: rc >= 32
	// result: (SRAconst [31] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 32) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRL strength-reduces the 32-bit logical
// right shift: a constant count >= 32 zeroes the result, a constant count
// in [0, 31] becomes SRLconst, and an explicit (ANDconst [31]) mask on a
// variable count is dropped.
func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRL x (ANDconst [31] y))
	// result: (SRL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLV strength-reduces the 64-bit logical
// right shift: a constant count >= 64 zeroes the result, any remaining
// constant count becomes SRLVconst, and an explicit (ANDconst [63]) mask
// on a variable count is dropped.
func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	// Rule order matters: the preceding rule already consumed
	// uint64(c) >= 64, so here c is in [0, 63].
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRLV x (ANDconst [63] y))
	// result: (SRLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLVconst simplifies a 64-bit logical right
// shift by a constant. Shift-left-then-shift-right pairs and shifts of
// zero-extended operands become a single BSTRPICKV bit-field extract
// (whose AuxInt packs the most-significant bit position shifted left by 6
// together with the least-significant bit position); shifts that consume
// all bits of a zero-extended operand become 0; constant operands fold
// completely.
func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// Extract bits [63, rc-lc] of the original value: lsb = rc-lc,
	// msb = (64-lc)-1, encoded as msb<<6 | lsb in the AuxInt.
	// match: (SRLVconst [rc] (SLLVconst [lc] x))
	// cond: lc <= rc
	// result: (BSTRPICKV [rc-lc + ((64-lc)-1)<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc - lc + ((64-lc)-1)<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + 31<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 31<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + 15<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 15<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + 7<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 7<<6)
		v.AddArg(x)
		return true
	}
	// NOTE(review): this rule looks unreachable — its condition
	// rc >= 0 && rc <= 31 is fully subsumed by the rc < 32 rule for
	// MOVWUreg above, which matches first. Kept as generated; confirm
	// against _gen/LOONG64.rules.
	// match: (SRLVconst [rc] (MOVWUreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRLconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// Shifting out every data bit of a zero-extended value yields zero.
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc >= 32
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		if !(rc >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc >= 16
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		if !(rc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc >= 8
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		if !(rc >= 8) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBD fuses a float64 subtraction with an
// adjacent multiply (optionally negated) into a single fused
// multiply-add/sub instruction, but only when useFMA permits it — fusing
// changes rounding behavior, so it is gated per function.
func rewriteValueLOONG64_OpLOONG64SUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// x*y - z
	// match: (SUBD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBD x y z)
	for {
		if v_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// z - x*y
	// match: (SUBD z (MULD x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// z - (-(x*y)) = x*y + z
	// match: (SUBD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGD {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULD {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	// (-(x*y)) - z = -(x*y + z)
	// match: (SUBD (NEGD (MULD x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDD x y z)
	for {
		if v_0.Op != OpLOONG64NEGD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBF is the float32 counterpart of
// rewriteValueLOONG64_OpLOONG64SUBD: it fuses a subtraction with an
// adjacent multiply (optionally negated) into a single fused
// multiply-add/sub instruction when useFMA permits it.
func rewriteValueLOONG64_OpLOONG64SUBF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// x*y - z
	// match: (SUBF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBF x y z)
	for {
		if v_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// z - x*y
	// match: (SUBF z (MULF x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULF {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// z - (-(x*y)) = x*y + z
	// match: (SUBF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGF {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULF {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	// (-(x*y)) - z = -(x*y + z)
	// match: (SUBF (NEGF (MULF x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDF x y z)
	for {
		if v_0.Op != OpLOONG64NEGF {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBV simplifies 64-bit integer subtraction:
// a 32-bit-immediate constant subtrahend becomes SUBVconst, subtracting a
// negation becomes addition, x-x folds to 0, 0-x becomes NEGV, and
// c - (-(x-d)) folds to x + (c-d).
func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// x - (-y) = x + y
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64ADDV)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpLOONG64NEGV)
		v.AddArg(x)
		return true
	}
	// c - (-(x-d)) = c + x - d = x + (c-d)
	// match: (SUBV (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// result: (ADDVconst [c-d] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBVconst simplifies subtraction of a
// constant: subtracting 0 is the identity, constant operands fold, and
// chains of constant subtractions/additions collapse into one ADDVconst
// when the combined immediate still fits in 32 bits.
func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// (x-d)-c = x + (-c-d)
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// (x+d)-c = x + (-c+d)
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XOR simplifies 64-bit XOR: a
// 32-bit-immediate constant operand (checked in either position, since XOR
// is commutative) becomes XORconst, and x^x folds to 0.
func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		// Commutative match: try both argument orders by swapping
		// v_0/v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XORconst simplifies XOR with a constant:
// XOR 0 is the identity, XOR -1 is bitwise NOT (lowered as NORconst [0]),
// constant operands fold, and chained XORconsts combine when the merged
// immediate still fits in 32 bits.
func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// x ^ -1 = ^x = NOR(0, x)
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpLOONG64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLeq16 lowers a signed 16-bit x <= y as
// !(x > y): both operands are sign-extended to 64 bits, compared with SGT,
// and the 0/1 result is inverted by XOR with 1.
func rewriteValueLOONG64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq16U lowers an unsigned 16-bit x <= y as
// !(x > y): both operands are zero-extended to 64 bits, compared with the
// unsigned SGTU, and the 0/1 result is inverted by XOR with 1.
func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32 lowers a signed 32-bit x <= y as
// !(x > y): both operands are sign-extended to 64 bits, compared with SGT,
// and the 0/1 result is inverted by XOR with 1.
func rewriteValueLOONG64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32F lowers (Leq32F x y) to (FPFlagTrue (CMPGEF y x)):
// x <= y is expressed as y >= x via the float compare-flag ops.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32U lowers (Leq32U x y) to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))),
// i.e. x <= y computed as !(x > y) on zero-extended operands.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64 lowers (Leq64 x y) to
// (XOR (MOVVconst [1]) (SGT x y)): full-width operands need no extension.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64F lowers (Leq64F x y) to (FPFlagTrue (CMPGED y x)):
// x <= y is expressed as y >= x via the double compare-flag ops.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64U lowers (Leq64U x y) to
// (XOR (MOVVconst [1]) (SGTU x y)): full-width operands need no extension.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8 lowers (Leq8 x y) to
// (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y))),
// i.e. signed x <= y computed as !(x > y) on sign-extended operands.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8U lowers (Leq8U x y) to
// (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))),
// i.e. x <= y computed as !(x > y) on zero-extended operands.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16 lowers (Less16 x y) to
// (SGT (SignExt16to64 y) (SignExt16to64 x)): x < y becomes y > x, signed.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16U lowers (Less16U x y) to
// (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)): x < y becomes y > x, unsigned.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32 lowers (Less32 x y) to
// (SGT (SignExt32to64 y) (SignExt32to64 x)): x < y becomes y > x, signed.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32F lowers (Less32F x y) to (FPFlagTrue (CMPGTF y x)):
// x < y is expressed as y > x via the float compare-flag ops.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess32U lowers (Less32U x y) to
// (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)): x < y becomes y > x, unsigned.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess64 lowers (Less64 x y) to (SGT y x):
// full-width signed compare, argument order swapped to express < via >.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess64F lowers (Less64F x y) to (FPFlagTrue (CMPGTD y x)):
// x < y is expressed as y > x via the double compare-flag ops.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess64U lowers (Less64U x y) to (SGTU y x):
// full-width unsigned compare, argument order swapped to express < via >.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess8 lowers (Less8 x y) to
// (SGT (SignExt8to64 y) (SignExt8to64 x)): x < y becomes y > x, signed.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess8U lowers (Less8U x y) to
// (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)): x < y becomes y > x, unsigned.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLoad lowers a generic (Load <t> ptr mem) to the
// width- and signedness-appropriate LOONG64 load (MOVB/MOVBU/MOVH/MOVHU/
// MOVW/MOVWU/MOVV/MOVF/MOVD). The arms are tried in order; the first whose
// type predicate matches wins. Returns false for types none of the arms
// cover (e.g. SSA-able aggregates, which are decomposed elsewhere).
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLocalAddr lowers (LocalAddr {sym} base mem) to
// (MOVVaddr {sym} ...). When the addressed element contains pointers the
// base is wrapped in SPanchored to keep the address ordered with memory
// (so the GC sees a consistent frame); otherwise the memory arg is dropped.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x16 lowers (Lsh16x16 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x32 lowers (Lsh16x32 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x64 lowers (Lsh16x64 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the shift amount is >= 64 (no extension needed for a 64-bit y).
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x8 lowers (Lsh16x8 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x16 lowers (Lsh32x16 x y): a bare 32-bit SLL when
// the shift is proven in-bounds, otherwise the SLL result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x32 lowers (Lsh32x32 x y): a bare 32-bit SLL when
// the shift is proven in-bounds, otherwise the SLL result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x64 lowers (Lsh32x64 x y): a bare 32-bit SLL when
// the shift is proven in-bounds, otherwise the SLL result is masked to zero via
// MASKEQZ when the shift amount is >= 32 (no extension needed for a 64-bit y).
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x8 lowers (Lsh32x8 x y): a bare 32-bit SLL when
// the shift is proven in-bounds, otherwise the SLL result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x16 lowers (Lsh64x16 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x32 lowers (Lsh64x32 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x64 lowers (Lsh64x64 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the shift amount is >= 64 (no extension needed for a 64-bit y).
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x8 lowers (Lsh64x8 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x16 lowers (Lsh8x16 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x32 lowers (Lsh8x32 x y): a bare SLLV when the
// shift is proven in-bounds, otherwise the SLLV result is masked to zero via
// MASKEQZ when the (zero-extended) shift amount is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit — change the rules instead.
func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x64 lowers Lsh8x64. The amount is already
// 64-bit, so no zero-extension is needed; the unbounded case still masks the
// SLLV result against (SGTU 64 y) via MASKEQZ.
func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x8 lowers Lsh8x8: bounded shifts become SLLV;
// unbounded ones zero-extend the 8-bit amount to 64 bits and mask the SLLV
// result with MASKEQZ/(SGTU 64 y) so amounts >= 64 yield 0.
func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpMod16 lowers signed 16-bit remainder to a 64-bit
// REMV with both operands sign-extended. Always fires.
func rewriteValueLOONG64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod16u lowers unsigned 16-bit remainder to a 64-bit
// REMVU with both operands zero-extended. Always fires.
func rewriteValueLOONG64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32 lowers signed 32-bit remainder to a 64-bit
// REMV with both operands sign-extended. Always fires.
func rewriteValueLOONG64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32u lowers unsigned 32-bit remainder to a 64-bit
// REMVU with both operands zero-extended. Always fires.
func rewriteValueLOONG64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod64 lowers signed 64-bit remainder directly to
// REMV; operands are already full-width, so no extension is needed.
func rewriteValueLOONG64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (REMV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueLOONG64_OpMod8 lowers signed 8-bit remainder to a 64-bit REMV
// with both operands sign-extended. Always fires.
func rewriteValueLOONG64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod8u lowers unsigned 8-bit remainder to a 64-bit
// REMVU with both operands zero-extended. Always fires.
func rewriteValueLOONG64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMove lowers the generic Move [s] (memmove of s
// bytes) by size class:
//   - s in [0,16]: an exact sequence of load/store pairs, widest first;
//     odd sizes use one overlapping misaligned pair (e.g. [7] stores 4
//     bytes at offset 0 and 4 at offset 3).
//   - s > 16 with s%8 != 0: peel the trailing s%8 bytes with a recursive
//     Move, leaving an 8-byte-multiple bulk copy.
//   - 16 < s <= 1024, s%8 == 0: DUFFCOPY, with the AuxInt selecting the
//     entry offset into the Duff's-device routine.
//   - s > 1024, s%8 == 0: the LoweredMove runtime loop, given a pointer to
//     the last 8-byte word of src via ADDVconst [s-8].
//
// Returns false for sizes not matched here (handled by other rules or
// passes).
func rewriteValueLOONG64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [5] dst src mem)
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] dst src mem)
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [9] dst src mem)
	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [10] dst src mem)
	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [11] dst src mem)
	// result: (MOVWstore [7] dst (MOVWload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [13] dst src mem)
	// result: (MOVVstore [5] dst (MOVVload [5] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(5)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [14] dst src mem)
	// result: (MOVVstore [6] dst (MOVVload [6] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [15] dst src mem)
	// result: (MOVVstore [7] dst (MOVVload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] dst src mem)
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 != 0 && s > 16
	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 != 0 && s > 16) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(s % 8)
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = int64ToAuxInt(s - s%8)
		v0.AddArg(dst)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = int64ToAuxInt(s - s%8)
		v1.AddArg(src)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = int64ToAuxInt(s - s%8)
		v2.AddArg3(dst, src, mem)
		v.AddArg3(v0, v1, v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 == 0 && s > 1024 && logLargeCopy(v, s)
	// result: (LoweredMove dst src (ADDVconst <src.Type> src [s-8]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s > 1024 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64LoweredMove)
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, src.Type)
		v0.AuxInt = int64ToAuxInt(s - 8)
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpNeq16 lowers Neq16 as SGTU((x^y), 0): the operands
// are zero-extended, XORed, and the result is "xor is unsigned-greater than
// zero". The asymmetric ZeroExt16to32/ZeroExt16to64 pair matches the rule
// as written in _gen/LOONG64.rules (both clear the upper bits).
func rewriteValueLOONG64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32 lowers Neq32 as SGTU((x^y), 0) with both
// operands zero-extended to 64 bits. Always fires.
func rewriteValueLOONG64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32F lowers float32 inequality by comparing with
// CMPEQF and inverting the FP condition flag via FPFlagFalse.
func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64 lowers Neq64 as SGTU((x^y), 0); no extension
// needed since operands are already 64-bit.
func rewriteValueLOONG64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64F lowers float64 inequality by comparing with
// CMPEQD and inverting the FP condition flag via FPFlagFalse.
func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq8 lowers Neq8 as SGTU((x^y), 0) with both
// operands zero-extended to 64 bits. Always fires.
func rewriteValueLOONG64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeqPtr lowers pointer inequality exactly like
// Neq64: SGTU((x^y), 0). Always fires.
func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNot lowers boolean Not as XORconst [1], flipping
// the low bit of the 0/1 boolean value. Always fires.
func rewriteValueLOONG64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueLOONG64_OpOffPtr lowers pointer-offset: an offset from SP
// becomes a MOVVaddr (address-of with 32-bit offset); any other base gets a
// plain ADDVconst. One of the two rules always fires.
func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueLOONG64_OpPanicBounds selects the bounds-check panic stub
// variant (A/B/C) from boundsABI(kind), which encodes which registers hold
// the index and length arguments. The kind AuxInt is passed through
// unchanged.
func rewriteValueLOONG64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpLOONG64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpPopCount16 lowers PopCount16 by moving the
// zero-extended value into an FP register (MOVWgpfp), counting bits with
// the vector VPCNT16 instruction, and moving the count back (MOVWfpgp).
func rewriteValueLOONG64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 <t> x)
	// result: (MOVWfpgp <t> (VPCNT16 <typ.Float32> (MOVWgpfp <typ.Float32> (ZeroExt16to32 x))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT16, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount32 lowers PopCount32 via the FP register
// file: MOVWgpfp in, VPCNT32 to count, MOVWfpgp back out.
func rewriteValueLOONG64_OpPopCount32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount32 <t> x)
	// result: (MOVWfpgp <t> (VPCNT32 <typ.Float32> (MOVWgpfp <typ.Float32> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT32, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount64 lowers PopCount64 via the FP register
// file: MOVVgpfp in, VPCNT64 to count, MOVVfpgp back out.
func rewriteValueLOONG64_OpPopCount64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount64 <t> x)
	// result: (MOVVfpgp <t> (VPCNT64 <typ.Float64> (MOVVgpfp <typ.Float64> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVVfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT64, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVgpfp, typ.Float64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCache lowers PrefetchCache to PRELD with
// hint 0 (per the rule; presumably the "load to L1" hint — see the
// LoongArch ISA manual to confirm).
func rewriteValueLOONG64_OpPrefetchCache(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCache addr mem)
	// result: (PRELD addr mem [0])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELD)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCacheStreamed lowers PrefetchCacheStreamed
// to PRELDX. The AuxInt packs the PRELDX hint operand as written in the
// rule: (((512 << 1) + (1 << 12)) << 5) + 2 — field meanings are defined by
// the LoongArch PRELDX encoding (see ISA manual).
func rewriteValueLOONG64_OpPrefetchCacheStreamed(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCacheStreamed addr mem)
	// result: (PRELDX addr mem [(((512 << 1) + (1 << 12)) << 5) + 2])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELDX)
		v.AuxInt = int64ToAuxInt((((512 << 1) + (1 << 12)) << 5) + 2)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft16 lowers 16-bit rotate-left. A constant
// amount becomes the classic (x << (c&15)) | (x >> (-c&15)) pair of generic
// shifts. A variable amount duplicates the zero-extended 16-bit value into
// both halves of a 32-bit word (OR with SLLVconst [16]) and uses a 32-bit
// ROTR by the negated amount, turning the left rotate into a right rotate.
func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft16 <t> x y)
	// result: (ROTR <t> (OR <typ.UInt32> (ZeroExt16to32 x) (SLLVconst <t> (ZeroExt16to32 x) [16])) (NEGV <typ.Int64> y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v2.AuxInt = int64ToAuxInt(16)
		v2.AddArg(v1)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v3.AddArg(y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft32 lowers RotateLeft32 to a rotate-right
// (ROTR) by the negated shift amount, since LOONG64 only has rotate-right.
func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (ROTR x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft64 lowers RotateLeft64 to the 64-bit
// rotate-right (ROTRV) by the negated shift amount.
func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROTRV x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTRV)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft8 lowers RotateLeft8. A constant rotate
// becomes an Or8 of complementary shifts (amounts masked to 0-7); a variable
// rotate ORs a left shift of x by y&7 with a right shift of the zero-extended
// x by (-y)&7.
func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft8 <t> x y)
	// result: (OR <t> (SLLV <t> x (ANDconst <typ.Int64> [7] y)) (SRLV <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEGV <typ.Int64> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64OR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux16 lowers an unsigned 16-bit right shift with
// a 16-bit shift count. If the count is proven in-range, a plain SRLV on the
// zero-extended value suffices; otherwise MASKEQZ zeroes the result when the
// (zero-extended) count is >= 64.
func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux32 lowers an unsigned 16-bit right shift with
// a 32-bit shift count; same scheme as Rsh16Ux16 but the count is
// zero-extended from 32 bits.
func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux64 lowers an unsigned 16-bit right shift with
// a 64-bit shift count; the count needs no extension, so it is used directly
// in both the shift and the >= 64 overshift guard.
func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux8 lowers an unsigned 16-bit right shift with
// an 8-bit shift count; same scheme as Rsh16Ux16 but the count is
// zero-extended from 8 bits.
func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x16 lowers a signed 16-bit right shift with a
// 16-bit shift count. Bounded counts use SRAV directly on the sign-extended
// value; otherwise the count is clamped: when count > 63, OR with
// NEGV(SGTU...) (all-ones) forces an effective shift of 63, replicating the
// sign bit as the architecture-independent semantics require.
func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x32 lowers a signed 16-bit right shift with a
// 32-bit shift count; same clamp-to-63 scheme as Rsh16x16 with the count
// zero-extended from 32 bits.
func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x64 lowers a signed 16-bit right shift with a
// 64-bit shift count; the count is used unextended in the clamp-to-63 scheme.
func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x8 lowers a signed 16-bit right shift with an
// 8-bit shift count; same clamp-to-63 scheme as Rsh16x16 with the count
// zero-extended from 8 bits.
func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux16 lowers an unsigned 32-bit right shift with
// a 16-bit shift count using the 32-bit SRL; the overshift guard compares the
// zero-extended count against 32 rather than 64.
func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux32 lowers an unsigned 32-bit right shift with
// a 32-bit shift count; same scheme as Rsh32Ux16 but the count is
// zero-extended from 32 bits.
func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux64 lowers an unsigned 32-bit right shift with
// a 64-bit shift count; the count is used unextended and guarded against 32.
func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux8 lowers an unsigned 32-bit right shift with
// an 8-bit shift count; same scheme as Rsh32Ux16 but the count is
// zero-extended from 8 bits.
func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x16 lowers a signed 32-bit right shift with a
// 16-bit shift count using the 32-bit SRA; unbounded counts are clamped to 31
// (overshift compare is against 31, not 63).
func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x32 lowers a signed 32-bit right shift with a
// 32-bit shift count; same clamp-to-31 scheme as Rsh32x16 with the count
// zero-extended from 32 bits.
func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x64 lowers a signed 32-bit right shift with a
// 64-bit shift count; the count is used unextended in the clamp-to-31 scheme.
func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x8 lowers a signed 32-bit right shift with an
// 8-bit shift count; same clamp-to-31 scheme as Rsh32x16 with the count
// zero-extended from 8 bits.
func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux16 lowers an unsigned 64-bit right shift with
// a 16-bit shift count; SRLV with the count zero-extended, and MASKEQZ zeroes
// the result when the count is >= 64.
func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux32 lowers an unsigned 64-bit right shift with
// a 32-bit shift count; same scheme as Rsh64Ux16 but the count is
// zero-extended from 32 bits.
func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux64 lowers an unsigned 64-bit right shift with
// a 64-bit shift count; the count is used unextended and guarded against 64.
func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux8 lowers an unsigned 64-bit right shift with
// an 8-bit shift count; same scheme as Rsh64Ux16 but the count is
// zero-extended from 8 bits.
func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x16 lowers a signed 64-bit right shift with a
// 16-bit shift count; unbounded counts are clamped to 63 via
// OR(NEGV(SGTU(count, 63)), count) before the SRAV.
func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x32 lowers Rsh64x32 (signed 64-bit right
// shift by a 32-bit amount) to LOONG64 ops. It reports whether a rewrite
// fired. Generated from _gen/LOONG64.rules — fix the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		// Amount proven < 64: a bare arithmetic shift suffices.
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(amt, 63)) is all ones exactly when amt > 63;
		// the OR pins the SRAV amount so oversized shifts yield
		// pure sign fill.
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x64 lowers Rsh64x64 (signed 64-bit right
// shift by a 64-bit amount) to LOONG64 ops. It reports whether a rewrite
// fired. Generated from _gen/LOONG64.rules — fix the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		// Amount proven < 64: a bare arithmetic shift suffices.
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// y is already 64 bits, so no zero extension is needed.
		// NEGV(SGTU(y, 63)) is all ones exactly when y > 63; the OR
		// pins the SRAV amount so oversized shifts yield sign fill.
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x8 lowers Rsh64x8 (signed 64-bit right
// shift by an 8-bit amount) to LOONG64 ops. It reports whether a rewrite
// fired. Generated from _gen/LOONG64.rules — fix the rules file, not this code.
func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		// Amount proven < 64: a bare arithmetic shift suffices.
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(amt, 63)) is all ones exactly when amt > 63;
		// the OR pins the SRAV amount so oversized shifts yield
		// pure sign fill.
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux16 lowers Rsh8Ux16 (unsigned 8-bit right
// shift by a 16-bit amount) to LOONG64 ops. The operand is zero-extended
// to 64 bits first so the full-width SRLV shifts in zeros. It reports
// whether a rewrite fired. Generated from _gen/LOONG64.rules — fix the
// rules file, not this code.
func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		// v3 = (64 > amount); MASKEQZ zeroes the result when the
		// shift amount is >= 64.
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux32 lowers Rsh8Ux32 (unsigned 8-bit right
// shift by a 32-bit amount) to LOONG64 ops. The operand is zero-extended
// to 64 bits first so the full-width SRLV shifts in zeros. It reports
// whether a rewrite fired. Generated from _gen/LOONG64.rules — fix the
// rules file, not this code.
func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		// v3 = (64 > amount); MASKEQZ zeroes the result when the
		// shift amount is >= 64.
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux64 lowers Rsh8Ux64 (unsigned 8-bit right
// shift by a 64-bit amount) to LOONG64 ops. The operand is zero-extended
// to 64 bits first; the amount is already full width. It reports whether
// a rewrite fired. Generated from _gen/LOONG64.rules — fix the rules
// file, not this code.
func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// v2 = (64 > y); MASKEQZ zeroes the result when the shift
		// amount is >= 64.
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux8 lowers Rsh8Ux8 (unsigned 8-bit right
// shift by an 8-bit amount) to LOONG64 ops. Both operand and amount are
// zero-extended to 64 bits. It reports whether a rewrite fired.
// Generated from _gen/LOONG64.rules — fix the rules file, not this code.
func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		// v3 = (64 > amount); MASKEQZ zeroes the result when the
		// shift amount is >= 64.
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x16 lowers Rsh8x16 (signed 8-bit right shift
// by a 16-bit amount) to LOONG64 ops. The operand is sign-extended to 64
// bits so the full-width SRAV shifts in sign bits. It reports whether a
// rewrite fired. Generated from _gen/LOONG64.rules — fix the rules file,
// not this code.
func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(amt, 63)) is all ones exactly when amt > 63;
		// the OR pins the SRAV amount so oversized shifts yield
		// pure sign fill.
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x32 lowers Rsh8x32 (signed 8-bit right shift
// by a 32-bit amount) to LOONG64 ops. The operand is sign-extended to 64
// bits so the full-width SRAV shifts in sign bits. It reports whether a
// rewrite fired. Generated from _gen/LOONG64.rules — fix the rules file,
// not this code.
func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(amt, 63)) is all ones exactly when amt > 63;
		// the OR pins the SRAV amount so oversized shifts yield
		// pure sign fill.
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x64 lowers Rsh8x64 (signed 8-bit right shift
// by a 64-bit amount) to LOONG64 ops. The operand is sign-extended to 64
// bits; the amount is already full width. It reports whether a rewrite
// fired. Generated from _gen/LOONG64.rules — fix the rules file, not
// this code.
func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(y, 63)) is all ones exactly when y > 63; the OR
		// pins the SRAV amount so oversized shifts yield sign fill.
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x8 lowers Rsh8x8 (signed 8-bit right shift
// by an 8-bit amount) to LOONG64 ops. The operand is sign-extended and
// the amount zero-extended to 64 bits. It reports whether a rewrite
// fired. Generated from _gen/LOONG64.rules — fix the rules file, not
// this code.
func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		// NEGV(SGTU(amt, 63)) is all ones exactly when amt > 63;
		// the OR pins the SRAV amount so oversized shifts yield
		// pure sign fill.
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect0 lowers Select0 of multi-result ops:
// the high half of Mul64uhilo, the low product of Mul64uover, and the
// sum/difference part of Add64carry/Sub64borrow. It reports whether a
// rewrite fired. Generated from _gen/LOONG64.rules — fix the rules
// file, not this code.
func rewriteValueLOONG64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Select0 (Mul64uhilo x y))
	// result: (MULHVU x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULHVU)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		// Sum part only; the carry-out is produced by Select1.
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		// Difference part only; the borrow-out is produced by Select1.
		v.reset(OpLOONG64SUBV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect1 lowers Select1 of multi-result ops:
// the low half of Mul64uhilo, the overflow flag of Mul64uover, and the
// carry/borrow-out of Add64carry/Sub64borrow. It reports whether a
// rewrite fired. Generated from _gen/LOONG64.rules — fix the rules
// file, not this code.
func rewriteValueLOONG64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		// Unsigned multiply overflows iff the high 64 bits are nonzero.
		v.reset(OpLOONG64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpLOONG64MULHVU, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		// Carry-out: x+y wrapped (x > x+y) or adding c wrapped (s > s+c).
		// At most one of the two can be set, so OR yields 0 or 1.
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		// Borrow-out: x-y wrapped (x-y > x) or subtracting c wrapped
		// (s-c > s). At most one can be set, so OR yields 0 or 1.
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelectN replaces a single-use runtime.memmove
// call with a constant, inlinable size by a Move op, eliminating the
// call. It reports whether a rewrite fired. Generated from
// _gen/LOONG64.rules — fix the rules file, not this code.
func rewriteValueLOONG64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVVconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpLOONG64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		// The size must be a compile-time constant to inline the move.
		if call_2.Op != OpLOONG64MOVVconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		// call.Uses == 1 ensures no other result of the call is consumed;
		// clobber(call) marks the dead call for removal.
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSlicemask lowers Slicemask to
// (SRAVconst (NEGV x) [63]): for x > 0, NEGV x is negative, so the
// arithmetic shift by 63 broadcasts the sign bit to all ones; for
// x == 0 the result is 0. (Slicemask inputs are lengths, assumed
// non-negative.) This rule always fires, so the function returns true
// unconditionally. Generated from _gen/LOONG64.rules — fix the rules
// file, not this code.
func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpStore lowers the generic Store op to the
// size- and class-appropriate LOONG64 store: MOVB/MOVH/MOVW/MOVV for
// 1/2/4/8-byte integers and MOVF/MOVD for 4/8-byte floats. It reports
// whether a rewrite fired. Generated from _gen/LOONG64.rules — fix the
// rules file, not this code.
func rewriteValueLOONG64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
 11001  func rewriteValueLOONG64_OpZero(v *Value) bool {
 11002  	v_1 := v.Args[1]
 11003  	v_0 := v.Args[0]
 11004  	b := v.Block
 11005  	typ := &b.Func.Config.Types
 11006  	// match: (Zero [0] _ mem)
 11007  	// result: mem
 11008  	for {
 11009  		if auxIntToInt64(v.AuxInt) != 0 {
 11010  			break
 11011  		}
 11012  		mem := v_1
 11013  		v.copyOf(mem)
 11014  		return true
 11015  	}
 11016  	// match: (Zero [1] ptr mem)
 11017  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
 11018  	for {
 11019  		if auxIntToInt64(v.AuxInt) != 1 {
 11020  			break
 11021  		}
 11022  		ptr := v_0
 11023  		mem := v_1
 11024  		v.reset(OpLOONG64MOVBstore)
 11025  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11026  		v0.AuxInt = int64ToAuxInt(0)
 11027  		v.AddArg3(ptr, v0, mem)
 11028  		return true
 11029  	}
 11030  	// match: (Zero [2] ptr mem)
 11031  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
 11032  	for {
 11033  		if auxIntToInt64(v.AuxInt) != 2 {
 11034  			break
 11035  		}
 11036  		ptr := v_0
 11037  		mem := v_1
 11038  		v.reset(OpLOONG64MOVHstore)
 11039  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11040  		v0.AuxInt = int64ToAuxInt(0)
 11041  		v.AddArg3(ptr, v0, mem)
 11042  		return true
 11043  	}
 11044  	// match: (Zero [3] ptr mem)
 11045  	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVHstore ptr (MOVVconst [0]) mem))
 11046  	for {
 11047  		if auxIntToInt64(v.AuxInt) != 3 {
 11048  			break
 11049  		}
 11050  		ptr := v_0
 11051  		mem := v_1
 11052  		v.reset(OpLOONG64MOVBstore)
 11053  		v.AuxInt = int32ToAuxInt(2)
 11054  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11055  		v0.AuxInt = int64ToAuxInt(0)
 11056  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
 11057  		v1.AddArg3(ptr, v0, mem)
 11058  		v.AddArg3(ptr, v0, v1)
 11059  		return true
 11060  	}
 11061  	// match: (Zero [4] {t} ptr mem)
 11062  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
 11063  	for {
 11064  		if auxIntToInt64(v.AuxInt) != 4 {
 11065  			break
 11066  		}
 11067  		ptr := v_0
 11068  		mem := v_1
 11069  		v.reset(OpLOONG64MOVWstore)
 11070  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11071  		v0.AuxInt = int64ToAuxInt(0)
 11072  		v.AddArg3(ptr, v0, mem)
 11073  		return true
 11074  	}
 11075  	// match: (Zero [5] ptr mem)
 11076  	// result: (MOVBstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11077  	for {
 11078  		if auxIntToInt64(v.AuxInt) != 5 {
 11079  			break
 11080  		}
 11081  		ptr := v_0
 11082  		mem := v_1
 11083  		v.reset(OpLOONG64MOVBstore)
 11084  		v.AuxInt = int32ToAuxInt(4)
 11085  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11086  		v0.AuxInt = int64ToAuxInt(0)
 11087  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11088  		v1.AddArg3(ptr, v0, mem)
 11089  		v.AddArg3(ptr, v0, v1)
 11090  		return true
 11091  	}
 11092  	// match: (Zero [6] ptr mem)
 11093  	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11094  	for {
 11095  		if auxIntToInt64(v.AuxInt) != 6 {
 11096  			break
 11097  		}
 11098  		ptr := v_0
 11099  		mem := v_1
 11100  		v.reset(OpLOONG64MOVHstore)
 11101  		v.AuxInt = int32ToAuxInt(4)
 11102  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11103  		v0.AuxInt = int64ToAuxInt(0)
 11104  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11105  		v1.AddArg3(ptr, v0, mem)
 11106  		v.AddArg3(ptr, v0, v1)
 11107  		return true
 11108  	}
 11109  	// match: (Zero [7] ptr mem)
 11110  	// result: (MOVWstore [3] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 11111  	for {
 11112  		if auxIntToInt64(v.AuxInt) != 7 {
 11113  			break
 11114  		}
 11115  		ptr := v_0
 11116  		mem := v_1
 11117  		v.reset(OpLOONG64MOVWstore)
 11118  		v.AuxInt = int32ToAuxInt(3)
 11119  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11120  		v0.AuxInt = int64ToAuxInt(0)
 11121  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 11122  		v1.AddArg3(ptr, v0, mem)
 11123  		v.AddArg3(ptr, v0, v1)
 11124  		return true
 11125  	}
 11126  	// match: (Zero [8] {t} ptr mem)
 11127  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
 11128  	for {
 11129  		if auxIntToInt64(v.AuxInt) != 8 {
 11130  			break
 11131  		}
 11132  		ptr := v_0
 11133  		mem := v_1
 11134  		v.reset(OpLOONG64MOVVstore)
 11135  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11136  		v0.AuxInt = int64ToAuxInt(0)
 11137  		v.AddArg3(ptr, v0, mem)
 11138  		return true
 11139  	}
 11140  	// match: (Zero [9] ptr mem)
 11141  	// result: (MOVBstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11142  	for {
 11143  		if auxIntToInt64(v.AuxInt) != 9 {
 11144  			break
 11145  		}
 11146  		ptr := v_0
 11147  		mem := v_1
 11148  		v.reset(OpLOONG64MOVBstore)
 11149  		v.AuxInt = int32ToAuxInt(8)
 11150  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11151  		v0.AuxInt = int64ToAuxInt(0)
 11152  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11153  		v1.AddArg3(ptr, v0, mem)
 11154  		v.AddArg3(ptr, v0, v1)
 11155  		return true
 11156  	}
 11157  	// match: (Zero [10] ptr mem)
 11158  	// result: (MOVHstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11159  	for {
 11160  		if auxIntToInt64(v.AuxInt) != 10 {
 11161  			break
 11162  		}
 11163  		ptr := v_0
 11164  		mem := v_1
 11165  		v.reset(OpLOONG64MOVHstore)
 11166  		v.AuxInt = int32ToAuxInt(8)
 11167  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11168  		v0.AuxInt = int64ToAuxInt(0)
 11169  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11170  		v1.AddArg3(ptr, v0, mem)
 11171  		v.AddArg3(ptr, v0, v1)
 11172  		return true
 11173  	}
 11174  	// match: (Zero [11] ptr mem)
 11175  	// result: (MOVWstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11176  	for {
 11177  		if auxIntToInt64(v.AuxInt) != 11 {
 11178  			break
 11179  		}
 11180  		ptr := v_0
 11181  		mem := v_1
 11182  		v.reset(OpLOONG64MOVWstore)
 11183  		v.AuxInt = int32ToAuxInt(7)
 11184  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11185  		v0.AuxInt = int64ToAuxInt(0)
 11186  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11187  		v1.AddArg3(ptr, v0, mem)
 11188  		v.AddArg3(ptr, v0, v1)
 11189  		return true
 11190  	}
 11191  	// match: (Zero [12] ptr mem)
 11192  	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11193  	for {
 11194  		if auxIntToInt64(v.AuxInt) != 12 {
 11195  			break
 11196  		}
 11197  		ptr := v_0
 11198  		mem := v_1
 11199  		v.reset(OpLOONG64MOVWstore)
 11200  		v.AuxInt = int32ToAuxInt(8)
 11201  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11202  		v0.AuxInt = int64ToAuxInt(0)
 11203  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11204  		v1.AddArg3(ptr, v0, mem)
 11205  		v.AddArg3(ptr, v0, v1)
 11206  		return true
 11207  	}
 11208  	// match: (Zero [13] ptr mem)
 11209  	// result: (MOVVstore [5] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11210  	for {
 11211  		if auxIntToInt64(v.AuxInt) != 13 {
 11212  			break
 11213  		}
 11214  		ptr := v_0
 11215  		mem := v_1
 11216  		v.reset(OpLOONG64MOVVstore)
 11217  		v.AuxInt = int32ToAuxInt(5)
 11218  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11219  		v0.AuxInt = int64ToAuxInt(0)
 11220  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11221  		v1.AddArg3(ptr, v0, mem)
 11222  		v.AddArg3(ptr, v0, v1)
 11223  		return true
 11224  	}
 11225  	// match: (Zero [14] ptr mem)
 11226  	// result: (MOVVstore [6] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11227  	for {
 11228  		if auxIntToInt64(v.AuxInt) != 14 {
 11229  			break
 11230  		}
 11231  		ptr := v_0
 11232  		mem := v_1
 11233  		v.reset(OpLOONG64MOVVstore)
 11234  		v.AuxInt = int32ToAuxInt(6)
 11235  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11236  		v0.AuxInt = int64ToAuxInt(0)
 11237  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11238  		v1.AddArg3(ptr, v0, mem)
 11239  		v.AddArg3(ptr, v0, v1)
 11240  		return true
 11241  	}
 11242  	// match: (Zero [15] ptr mem)
 11243  	// result: (MOVVstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11244  	for {
 11245  		if auxIntToInt64(v.AuxInt) != 15 {
 11246  			break
 11247  		}
 11248  		ptr := v_0
 11249  		mem := v_1
 11250  		v.reset(OpLOONG64MOVVstore)
 11251  		v.AuxInt = int32ToAuxInt(7)
 11252  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11253  		v0.AuxInt = int64ToAuxInt(0)
 11254  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11255  		v1.AddArg3(ptr, v0, mem)
 11256  		v.AddArg3(ptr, v0, v1)
 11257  		return true
 11258  	}
 11259  	// match: (Zero [16] ptr mem)
 11260  	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11261  	for {
 11262  		if auxIntToInt64(v.AuxInt) != 16 {
 11263  			break
 11264  		}
 11265  		ptr := v_0
 11266  		mem := v_1
 11267  		v.reset(OpLOONG64MOVVstore)
 11268  		v.AuxInt = int32ToAuxInt(8)
 11269  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11270  		v0.AuxInt = int64ToAuxInt(0)
 11271  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11272  		v1.AddArg3(ptr, v0, mem)
 11273  		v.AddArg3(ptr, v0, v1)
 11274  		return true
 11275  	}
 11276  	// match: (Zero [s] ptr mem)
 11277  	// cond: s%8 != 0 && s > 16
 11278  	// result: (Zero [s%8] (OffPtr <ptr.Type> ptr [s-s%8]) (Zero [s-s%8] ptr mem))
 11279  	for {
 11280  		s := auxIntToInt64(v.AuxInt)
 11281  		ptr := v_0
 11282  		mem := v_1
 11283  		if !(s%8 != 0 && s > 16) {
 11284  			break
 11285  		}
 11286  		v.reset(OpZero)
 11287  		v.AuxInt = int64ToAuxInt(s % 8)
 11288  		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
 11289  		v0.AuxInt = int64ToAuxInt(s - s%8)
 11290  		v0.AddArg(ptr)
 11291  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 11292  		v1.AuxInt = int64ToAuxInt(s - s%8)
 11293  		v1.AddArg2(ptr, mem)
 11294  		v.AddArg2(v0, v1)
 11295  		return true
 11296  	}
 11297  	// match: (Zero [s] ptr mem)
 11298  	// cond: s%8 == 0 && s > 16 && s <= 8*128
 11299  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
 11300  	for {
 11301  		s := auxIntToInt64(v.AuxInt)
 11302  		ptr := v_0
 11303  		mem := v_1
 11304  		if !(s%8 == 0 && s > 16 && s <= 8*128) {
 11305  			break
 11306  		}
 11307  		v.reset(OpLOONG64DUFFZERO)
 11308  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
 11309  		v.AddArg2(ptr, mem)
 11310  		return true
 11311  	}
 11312  	// match: (Zero [s] ptr mem)
 11313  	// cond: s%8 == 0 && s > 8*128
 11314  	// result: (LoweredZero ptr (ADDVconst <ptr.Type> ptr [s-8]) mem)
 11315  	for {
 11316  		s := auxIntToInt64(v.AuxInt)
 11317  		ptr := v_0
 11318  		mem := v_1
 11319  		if !(s%8 == 0 && s > 8*128) {
 11320  			break
 11321  		}
 11322  		v.reset(OpLOONG64LoweredZero)
 11323  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, ptr.Type)
 11324  		v0.AuxInt = int64ToAuxInt(s - 8)
 11325  		v0.AddArg(ptr)
 11326  		v.AddArg3(ptr, v0, mem)
 11327  		return true
 11328  	}
 11329  	return false
 11330  }
// rewriteBlockLOONG64 applies the block-rewrite rules generated from
// _gen/LOONG64.rules to b. Each rule inspects b's control value(s) and,
// on a match, replaces the block with a cheaper LOONG64 form — e.g. a
// comparison-producing control is folded directly into a conditional
// branch block kind (BEQ/BNE/BGE/BLT/...), and a constant control
// collapses the block to BlockFirst (optionally swapping successors).
// It reports whether any rewrite fired; b is unchanged when it returns
// false. The `// match:` / `// result:` comments above each rule are the
// rule text this code was generated from. DO NOT EDIT by hand.
func rewriteBlockLOONG64(b *Block) bool {
	// Cached type table for rules that must build new values (e.g. SGTUconst).
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockLOONG64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64NE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64NE, cmp)
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64NE, x)
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64EQ, x)
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64GEZ, x)
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64LEZ, x)
			return true
		}
		// match: (EQ (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (EQ (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64EQ, v0)
			return true
		}
		// match: (EQ (SUBV x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BEQ, x, y)
			return true
		}
		// match: (EQ (SGT x y) yes no)
		// result: (BGE y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGE, y, x)
			return true
		}
		// match: (EQ (SGTU x y) yes no)
		// result: (BGEU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGEU, y, x)
			return true
		}
		// match: (EQ (MOVVconst [0]) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NE (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockLOONG64NE, v0)
			return true
		}
	case BlockLOONG64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64NE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQ, cmp)
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64EQ, x)
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64NE, x)
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64LTZ, x)
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64GTZ, x)
			return true
		}
		// match: (NE (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (NE (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64NE, v0)
			return true
		}
		// match: (NE (SUBV x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BNE, x, y)
			return true
		}
		// match: (NE (SGT x y) yes no)
		// result: (BLT y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLT, y, x)
			return true
		}
		// match: (NE (SGTU x y) yes no)
		// result: (BLTU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLTU, y, x)
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
	}
	return false
}
 11833  
