Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
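// rewriteValueLOONG64 dispatches on v.Op and applies the lowering rules
// generated from _gen/LOONG64.rules: ops that lower one-to-one have their Op
// retargeted in place (for example OpAdd64 becomes OpLOONG64ADDV), while the
// remaining ops are handled by the per-op rewrite functions below. The return
// value reports whether v was rewritten.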
     7  func rewriteValueLOONG64(v *Value) bool {
     8  	switch v.Op {
     9  	case OpAbs:
    10  		v.Op = OpLOONG64ABSD
    11  		return true
    12  	case OpAdd16:
    13  		v.Op = OpLOONG64ADDV
    14  		return true
    15  	case OpAdd32:
    16  		v.Op = OpLOONG64ADDV
    17  		return true
    18  	case OpAdd32F:
    19  		v.Op = OpLOONG64ADDF
    20  		return true
    21  	case OpAdd64:
    22  		v.Op = OpLOONG64ADDV
    23  		return true
    24  	case OpAdd64F:
    25  		v.Op = OpLOONG64ADDD
    26  		return true
    27  	case OpAdd8:
    28  		v.Op = OpLOONG64ADDV
    29  		return true
    30  	case OpAddPtr:
    31  		v.Op = OpLOONG64ADDV
    32  		return true
    33  	case OpAddr:
    34  		return rewriteValueLOONG64_OpAddr(v)
    35  	case OpAnd16:
    36  		v.Op = OpLOONG64AND
    37  		return true
    38  	case OpAnd32:
    39  		v.Op = OpLOONG64AND
    40  		return true
    41  	case OpAnd64:
    42  		v.Op = OpLOONG64AND
    43  		return true
    44  	case OpAnd8:
    45  		v.Op = OpLOONG64AND
    46  		return true
    47  	case OpAndB:
    48  		v.Op = OpLOONG64AND
    49  		return true
    50  	case OpAtomicAdd32:
    51  		v.Op = OpLOONG64LoweredAtomicAdd32
    52  		return true
    53  	case OpAtomicAdd64:
    54  		v.Op = OpLOONG64LoweredAtomicAdd64
    55  		return true
    56  	case OpAtomicAnd32:
    57  		v.Op = OpLOONG64LoweredAtomicAnd32
    58  		return true
    59  	case OpAtomicAnd32value:
    60  		v.Op = OpLOONG64LoweredAtomicAnd32value
    61  		return true
    62  	case OpAtomicAnd64value:
    63  		v.Op = OpLOONG64LoweredAtomicAnd64value
    64  		return true
    65  	case OpAtomicAnd8:
    66  		return rewriteValueLOONG64_OpAtomicAnd8(v)
    67  	case OpAtomicCompareAndSwap32:
    68  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
    69  	case OpAtomicCompareAndSwap32Variant:
    70  		return rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v)
    71  	case OpAtomicCompareAndSwap64:
    72  		v.Op = OpLOONG64LoweredAtomicCas64
    73  		return true
    74  	case OpAtomicCompareAndSwap64Variant:
    75  		v.Op = OpLOONG64LoweredAtomicCas64Variant
    76  		return true
    77  	case OpAtomicExchange32:
    78  		v.Op = OpLOONG64LoweredAtomicExchange32
    79  		return true
    80  	case OpAtomicExchange64:
    81  		v.Op = OpLOONG64LoweredAtomicExchange64
    82  		return true
    83  	case OpAtomicExchange8Variant:
    84  		v.Op = OpLOONG64LoweredAtomicExchange8Variant
    85  		return true
    86  	case OpAtomicLoad32:
    87  		v.Op = OpLOONG64LoweredAtomicLoad32
    88  		return true
    89  	case OpAtomicLoad64:
    90  		v.Op = OpLOONG64LoweredAtomicLoad64
    91  		return true
    92  	case OpAtomicLoad8:
    93  		v.Op = OpLOONG64LoweredAtomicLoad8
    94  		return true
    95  	case OpAtomicLoadPtr:
    96  		v.Op = OpLOONG64LoweredAtomicLoad64
    97  		return true
    98  	case OpAtomicOr32:
    99  		v.Op = OpLOONG64LoweredAtomicOr32
   100  		return true
   101  	case OpAtomicOr32value:
   102  		v.Op = OpLOONG64LoweredAtomicOr32value
   103  		return true
   104  	case OpAtomicOr64value:
   105  		v.Op = OpLOONG64LoweredAtomicOr64value
   106  		return true
   107  	case OpAtomicOr8:
   108  		return rewriteValueLOONG64_OpAtomicOr8(v)
   109  	case OpAtomicStore32:
   110  		v.Op = OpLOONG64LoweredAtomicStore32
   111  		return true
   112  	case OpAtomicStore32Variant:
   113  		v.Op = OpLOONG64LoweredAtomicStore32Variant
   114  		return true
   115  	case OpAtomicStore64:
   116  		v.Op = OpLOONG64LoweredAtomicStore64
   117  		return true
   118  	case OpAtomicStore64Variant:
   119  		v.Op = OpLOONG64LoweredAtomicStore64Variant
   120  		return true
   121  	case OpAtomicStore8:
   122  		v.Op = OpLOONG64LoweredAtomicStore8
   123  		return true
   124  	case OpAtomicStore8Variant:
   125  		v.Op = OpLOONG64LoweredAtomicStore8Variant
   126  		return true
   127  	case OpAtomicStorePtrNoWB:
   128  		v.Op = OpLOONG64LoweredAtomicStore64
   129  		return true
   130  	case OpAvg64u:
   131  		return rewriteValueLOONG64_OpAvg64u(v)
   132  	case OpBitLen16:
   133  		return rewriteValueLOONG64_OpBitLen16(v)
   134  	case OpBitLen32:
   135  		return rewriteValueLOONG64_OpBitLen32(v)
   136  	case OpBitLen64:
   137  		return rewriteValueLOONG64_OpBitLen64(v)
   138  	case OpBitLen8:
   139  		return rewriteValueLOONG64_OpBitLen8(v)
   140  	case OpBitRev16:
   141  		return rewriteValueLOONG64_OpBitRev16(v)
   142  	case OpBitRev32:
   143  		v.Op = OpLOONG64BITREVW
   144  		return true
   145  	case OpBitRev64:
   146  		v.Op = OpLOONG64BITREVV
   147  		return true
   148  	case OpBitRev8:
   149  		v.Op = OpLOONG64BITREV4B
   150  		return true
   151  	case OpBswap16:
   152  		v.Op = OpLOONG64REVB2H
   153  		return true
   154  	case OpBswap32:
   155  		v.Op = OpLOONG64REVB2W
   156  		return true
   157  	case OpBswap64:
   158  		v.Op = OpLOONG64REVBV
   159  		return true
   160  	case OpClosureCall:
   161  		v.Op = OpLOONG64CALLclosure
   162  		return true
   163  	case OpCom16:
   164  		return rewriteValueLOONG64_OpCom16(v)
   165  	case OpCom32:
   166  		return rewriteValueLOONG64_OpCom32(v)
   167  	case OpCom64:
   168  		return rewriteValueLOONG64_OpCom64(v)
   169  	case OpCom8:
   170  		return rewriteValueLOONG64_OpCom8(v)
   171  	case OpCondSelect:
   172  		return rewriteValueLOONG64_OpCondSelect(v)
   173  	case OpConst16:
   174  		return rewriteValueLOONG64_OpConst16(v)
   175  	case OpConst32:
   176  		return rewriteValueLOONG64_OpConst32(v)
   177  	case OpConst32F:
   178  		return rewriteValueLOONG64_OpConst32F(v)
   179  	case OpConst64:
   180  		return rewriteValueLOONG64_OpConst64(v)
   181  	case OpConst64F:
   182  		return rewriteValueLOONG64_OpConst64F(v)
   183  	case OpConst8:
   184  		return rewriteValueLOONG64_OpConst8(v)
   185  	case OpConstBool:
   186  		return rewriteValueLOONG64_OpConstBool(v)
   187  	case OpConstNil:
   188  		return rewriteValueLOONG64_OpConstNil(v)
   189  	case OpCopysign:
   190  		v.Op = OpLOONG64FCOPYSGD
   191  		return true
   192  	case OpCtz16:
   193  		return rewriteValueLOONG64_OpCtz16(v)
   194  	case OpCtz16NonZero:
   195  		v.Op = OpCtz64
   196  		return true
   197  	case OpCtz32:
   198  		v.Op = OpLOONG64CTZW
   199  		return true
   200  	case OpCtz32NonZero:
   201  		v.Op = OpCtz64
   202  		return true
   203  	case OpCtz64:
   204  		v.Op = OpLOONG64CTZV
   205  		return true
   206  	case OpCtz64NonZero:
   207  		v.Op = OpCtz64
   208  		return true
   209  	case OpCtz8:
   210  		return rewriteValueLOONG64_OpCtz8(v)
   211  	case OpCtz8NonZero:
   212  		v.Op = OpCtz64
   213  		return true
   214  	case OpCvt32Fto32:
   215  		v.Op = OpLOONG64TRUNCFW
   216  		return true
   217  	case OpCvt32Fto64:
   218  		v.Op = OpLOONG64TRUNCFV
   219  		return true
   220  	case OpCvt32Fto64F:
   221  		v.Op = OpLOONG64MOVFD
   222  		return true
   223  	case OpCvt32to32F:
   224  		v.Op = OpLOONG64MOVWF
   225  		return true
   226  	case OpCvt32to64F:
   227  		v.Op = OpLOONG64MOVWD
   228  		return true
   229  	case OpCvt64Fto32:
   230  		v.Op = OpLOONG64TRUNCDW
   231  		return true
   232  	case OpCvt64Fto32F:
   233  		v.Op = OpLOONG64MOVDF
   234  		return true
   235  	case OpCvt64Fto64:
   236  		v.Op = OpLOONG64TRUNCDV
   237  		return true
   238  	case OpCvt64to32F:
   239  		v.Op = OpLOONG64MOVVF
   240  		return true
   241  	case OpCvt64to64F:
   242  		v.Op = OpLOONG64MOVVD
   243  		return true
   244  	case OpCvtBoolToUint8:
   245  		v.Op = OpCopy
   246  		return true
   247  	case OpDiv16:
   248  		return rewriteValueLOONG64_OpDiv16(v)
   249  	case OpDiv16u:
   250  		return rewriteValueLOONG64_OpDiv16u(v)
   251  	case OpDiv32:
   252  		return rewriteValueLOONG64_OpDiv32(v)
   253  	case OpDiv32F:
   254  		v.Op = OpLOONG64DIVF
   255  		return true
   256  	case OpDiv32u:
   257  		return rewriteValueLOONG64_OpDiv32u(v)
   258  	case OpDiv64:
   259  		return rewriteValueLOONG64_OpDiv64(v)
   260  	case OpDiv64F:
   261  		v.Op = OpLOONG64DIVD
   262  		return true
   263  	case OpDiv64u:
   264  		v.Op = OpLOONG64DIVVU
   265  		return true
   266  	case OpDiv8:
   267  		return rewriteValueLOONG64_OpDiv8(v)
   268  	case OpDiv8u:
   269  		return rewriteValueLOONG64_OpDiv8u(v)
   270  	case OpEq16:
   271  		return rewriteValueLOONG64_OpEq16(v)
   272  	case OpEq32:
   273  		return rewriteValueLOONG64_OpEq32(v)
   274  	case OpEq32F:
   275  		return rewriteValueLOONG64_OpEq32F(v)
   276  	case OpEq64:
   277  		return rewriteValueLOONG64_OpEq64(v)
   278  	case OpEq64F:
   279  		return rewriteValueLOONG64_OpEq64F(v)
   280  	case OpEq8:
   281  		return rewriteValueLOONG64_OpEq8(v)
   282  	case OpEqB:
   283  		return rewriteValueLOONG64_OpEqB(v)
   284  	case OpEqPtr:
   285  		return rewriteValueLOONG64_OpEqPtr(v)
   286  	case OpFMA:
   287  		v.Op = OpLOONG64FMADDD
   288  		return true
   289  	case OpGetCallerPC:
   290  		v.Op = OpLOONG64LoweredGetCallerPC
   291  		return true
   292  	case OpGetCallerSP:
   293  		v.Op = OpLOONG64LoweredGetCallerSP
   294  		return true
   295  	case OpGetClosurePtr:
   296  		v.Op = OpLOONG64LoweredGetClosurePtr
   297  		return true
   298  	case OpHmul32:
   299  		return rewriteValueLOONG64_OpHmul32(v)
   300  	case OpHmul32u:
   301  		return rewriteValueLOONG64_OpHmul32u(v)
   302  	case OpHmul64:
   303  		v.Op = OpLOONG64MULHV
   304  		return true
   305  	case OpHmul64u:
   306  		v.Op = OpLOONG64MULHVU
   307  		return true
   308  	case OpInterCall:
   309  		v.Op = OpLOONG64CALLinter
   310  		return true
   311  	case OpIsInBounds:
   312  		return rewriteValueLOONG64_OpIsInBounds(v)
   313  	case OpIsNonNil:
   314  		return rewriteValueLOONG64_OpIsNonNil(v)
   315  	case OpIsSliceInBounds:
   316  		return rewriteValueLOONG64_OpIsSliceInBounds(v)
   317  	case OpLOONG64ADDD:
   318  		return rewriteValueLOONG64_OpLOONG64ADDD(v)
   319  	case OpLOONG64ADDF:
   320  		return rewriteValueLOONG64_OpLOONG64ADDF(v)
   321  	case OpLOONG64ADDV:
   322  		return rewriteValueLOONG64_OpLOONG64ADDV(v)
   323  	case OpLOONG64ADDVconst:
   324  		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
   325  	case OpLOONG64AND:
   326  		return rewriteValueLOONG64_OpLOONG64AND(v)
   327  	case OpLOONG64ANDconst:
   328  		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
   329  	case OpLOONG64DIVV:
   330  		return rewriteValueLOONG64_OpLOONG64DIVV(v)
   331  	case OpLOONG64DIVVU:
   332  		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
   333  	case OpLOONG64MASKEQZ:
   334  		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
   335  	case OpLOONG64MASKNEZ:
   336  		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
   337  	case OpLOONG64MOVBUload:
   338  		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
   339  	case OpLOONG64MOVBUloadidx:
   340  		return rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v)
   341  	case OpLOONG64MOVBUreg:
   342  		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
   343  	case OpLOONG64MOVBload:
   344  		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
   345  	case OpLOONG64MOVBloadidx:
   346  		return rewriteValueLOONG64_OpLOONG64MOVBloadidx(v)
   347  	case OpLOONG64MOVBreg:
   348  		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
   349  	case OpLOONG64MOVBstore:
   350  		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
   351  	case OpLOONG64MOVBstoreidx:
   352  		return rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v)
   353  	case OpLOONG64MOVBstorezero:
   354  		return rewriteValueLOONG64_OpLOONG64MOVBstorezero(v)
   355  	case OpLOONG64MOVBstorezeroidx:
   356  		return rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v)
   357  	case OpLOONG64MOVDload:
   358  		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
   359  	case OpLOONG64MOVDloadidx:
   360  		return rewriteValueLOONG64_OpLOONG64MOVDloadidx(v)
   361  	case OpLOONG64MOVDstore:
   362  		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
   363  	case OpLOONG64MOVDstoreidx:
   364  		return rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v)
   365  	case OpLOONG64MOVFload:
   366  		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
   367  	case OpLOONG64MOVFloadidx:
   368  		return rewriteValueLOONG64_OpLOONG64MOVFloadidx(v)
   369  	case OpLOONG64MOVFstore:
   370  		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
   371  	case OpLOONG64MOVFstoreidx:
   372  		return rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v)
   373  	case OpLOONG64MOVHUload:
   374  		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
   375  	case OpLOONG64MOVHUloadidx:
   376  		return rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v)
   377  	case OpLOONG64MOVHUreg:
   378  		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
   379  	case OpLOONG64MOVHload:
   380  		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
   381  	case OpLOONG64MOVHloadidx:
   382  		return rewriteValueLOONG64_OpLOONG64MOVHloadidx(v)
   383  	case OpLOONG64MOVHreg:
   384  		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
   385  	case OpLOONG64MOVHstore:
   386  		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
   387  	case OpLOONG64MOVHstoreidx:
   388  		return rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v)
   389  	case OpLOONG64MOVHstorezero:
   390  		return rewriteValueLOONG64_OpLOONG64MOVHstorezero(v)
   391  	case OpLOONG64MOVHstorezeroidx:
   392  		return rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v)
   393  	case OpLOONG64MOVVload:
   394  		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
   395  	case OpLOONG64MOVVloadidx:
   396  		return rewriteValueLOONG64_OpLOONG64MOVVloadidx(v)
   397  	case OpLOONG64MOVVnop:
   398  		return rewriteValueLOONG64_OpLOONG64MOVVnop(v)
   399  	case OpLOONG64MOVVreg:
   400  		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
   401  	case OpLOONG64MOVVstore:
   402  		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
   403  	case OpLOONG64MOVVstoreidx:
   404  		return rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v)
   405  	case OpLOONG64MOVVstorezero:
   406  		return rewriteValueLOONG64_OpLOONG64MOVVstorezero(v)
   407  	case OpLOONG64MOVVstorezeroidx:
   408  		return rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v)
   409  	case OpLOONG64MOVWUload:
   410  		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
   411  	case OpLOONG64MOVWUloadidx:
   412  		return rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v)
   413  	case OpLOONG64MOVWUreg:
   414  		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
   415  	case OpLOONG64MOVWload:
   416  		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
   417  	case OpLOONG64MOVWloadidx:
   418  		return rewriteValueLOONG64_OpLOONG64MOVWloadidx(v)
   419  	case OpLOONG64MOVWreg:
   420  		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
   421  	case OpLOONG64MOVWstore:
   422  		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
   423  	case OpLOONG64MOVWstoreidx:
   424  		return rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v)
   425  	case OpLOONG64MOVWstorezero:
   426  		return rewriteValueLOONG64_OpLOONG64MOVWstorezero(v)
   427  	case OpLOONG64MOVWstorezeroidx:
   428  		return rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v)
   429  	case OpLOONG64MULV:
   430  		return rewriteValueLOONG64_OpLOONG64MULV(v)
   431  	case OpLOONG64NEGV:
   432  		return rewriteValueLOONG64_OpLOONG64NEGV(v)
   433  	case OpLOONG64NOR:
   434  		return rewriteValueLOONG64_OpLOONG64NOR(v)
   435  	case OpLOONG64NORconst:
   436  		return rewriteValueLOONG64_OpLOONG64NORconst(v)
   437  	case OpLOONG64OR:
   438  		return rewriteValueLOONG64_OpLOONG64OR(v)
   439  	case OpLOONG64ORconst:
   440  		return rewriteValueLOONG64_OpLOONG64ORconst(v)
   441  	case OpLOONG64REMV:
   442  		return rewriteValueLOONG64_OpLOONG64REMV(v)
   443  	case OpLOONG64REMVU:
   444  		return rewriteValueLOONG64_OpLOONG64REMVU(v)
   445  	case OpLOONG64ROTR:
   446  		return rewriteValueLOONG64_OpLOONG64ROTR(v)
   447  	case OpLOONG64ROTRV:
   448  		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
   449  	case OpLOONG64SGT:
   450  		return rewriteValueLOONG64_OpLOONG64SGT(v)
   451  	case OpLOONG64SGTU:
   452  		return rewriteValueLOONG64_OpLOONG64SGTU(v)
   453  	case OpLOONG64SGTUconst:
   454  		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
   455  	case OpLOONG64SGTconst:
   456  		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
   457  	case OpLOONG64SLL:
   458  		return rewriteValueLOONG64_OpLOONG64SLL(v)
   459  	case OpLOONG64SLLV:
   460  		return rewriteValueLOONG64_OpLOONG64SLLV(v)
   461  	case OpLOONG64SLLVconst:
   462  		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
   463  	case OpLOONG64SRA:
   464  		return rewriteValueLOONG64_OpLOONG64SRA(v)
   465  	case OpLOONG64SRAV:
   466  		return rewriteValueLOONG64_OpLOONG64SRAV(v)
   467  	case OpLOONG64SRAVconst:
   468  		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
   469  	case OpLOONG64SRL:
   470  		return rewriteValueLOONG64_OpLOONG64SRL(v)
   471  	case OpLOONG64SRLV:
   472  		return rewriteValueLOONG64_OpLOONG64SRLV(v)
   473  	case OpLOONG64SRLVconst:
   474  		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
   475  	case OpLOONG64SUBD:
   476  		return rewriteValueLOONG64_OpLOONG64SUBD(v)
   477  	case OpLOONG64SUBF:
   478  		return rewriteValueLOONG64_OpLOONG64SUBF(v)
   479  	case OpLOONG64SUBV:
   480  		return rewriteValueLOONG64_OpLOONG64SUBV(v)
   481  	case OpLOONG64SUBVconst:
   482  		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
   483  	case OpLOONG64XOR:
   484  		return rewriteValueLOONG64_OpLOONG64XOR(v)
   485  	case OpLOONG64XORconst:
   486  		return rewriteValueLOONG64_OpLOONG64XORconst(v)
   487  	case OpLeq16:
   488  		return rewriteValueLOONG64_OpLeq16(v)
   489  	case OpLeq16U:
   490  		return rewriteValueLOONG64_OpLeq16U(v)
   491  	case OpLeq32:
   492  		return rewriteValueLOONG64_OpLeq32(v)
   493  	case OpLeq32F:
   494  		return rewriteValueLOONG64_OpLeq32F(v)
   495  	case OpLeq32U:
   496  		return rewriteValueLOONG64_OpLeq32U(v)
   497  	case OpLeq64:
   498  		return rewriteValueLOONG64_OpLeq64(v)
   499  	case OpLeq64F:
   500  		return rewriteValueLOONG64_OpLeq64F(v)
   501  	case OpLeq64U:
   502  		return rewriteValueLOONG64_OpLeq64U(v)
   503  	case OpLeq8:
   504  		return rewriteValueLOONG64_OpLeq8(v)
   505  	case OpLeq8U:
   506  		return rewriteValueLOONG64_OpLeq8U(v)
   507  	case OpLess16:
   508  		return rewriteValueLOONG64_OpLess16(v)
   509  	case OpLess16U:
   510  		return rewriteValueLOONG64_OpLess16U(v)
   511  	case OpLess32:
   512  		return rewriteValueLOONG64_OpLess32(v)
   513  	case OpLess32F:
   514  		return rewriteValueLOONG64_OpLess32F(v)
   515  	case OpLess32U:
   516  		return rewriteValueLOONG64_OpLess32U(v)
   517  	case OpLess64:
   518  		return rewriteValueLOONG64_OpLess64(v)
   519  	case OpLess64F:
   520  		return rewriteValueLOONG64_OpLess64F(v)
   521  	case OpLess64U:
   522  		return rewriteValueLOONG64_OpLess64U(v)
   523  	case OpLess8:
   524  		return rewriteValueLOONG64_OpLess8(v)
   525  	case OpLess8U:
   526  		return rewriteValueLOONG64_OpLess8U(v)
   527  	case OpLoad:
   528  		return rewriteValueLOONG64_OpLoad(v)
   529  	case OpLocalAddr:
   530  		return rewriteValueLOONG64_OpLocalAddr(v)
   531  	case OpLsh16x16:
   532  		return rewriteValueLOONG64_OpLsh16x16(v)
   533  	case OpLsh16x32:
   534  		return rewriteValueLOONG64_OpLsh16x32(v)
   535  	case OpLsh16x64:
   536  		return rewriteValueLOONG64_OpLsh16x64(v)
   537  	case OpLsh16x8:
   538  		return rewriteValueLOONG64_OpLsh16x8(v)
   539  	case OpLsh32x16:
   540  		return rewriteValueLOONG64_OpLsh32x16(v)
   541  	case OpLsh32x32:
   542  		return rewriteValueLOONG64_OpLsh32x32(v)
   543  	case OpLsh32x64:
   544  		return rewriteValueLOONG64_OpLsh32x64(v)
   545  	case OpLsh32x8:
   546  		return rewriteValueLOONG64_OpLsh32x8(v)
   547  	case OpLsh64x16:
   548  		return rewriteValueLOONG64_OpLsh64x16(v)
   549  	case OpLsh64x32:
   550  		return rewriteValueLOONG64_OpLsh64x32(v)
   551  	case OpLsh64x64:
   552  		return rewriteValueLOONG64_OpLsh64x64(v)
   553  	case OpLsh64x8:
   554  		return rewriteValueLOONG64_OpLsh64x8(v)
   555  	case OpLsh8x16:
   556  		return rewriteValueLOONG64_OpLsh8x16(v)
   557  	case OpLsh8x32:
   558  		return rewriteValueLOONG64_OpLsh8x32(v)
   559  	case OpLsh8x64:
   560  		return rewriteValueLOONG64_OpLsh8x64(v)
   561  	case OpLsh8x8:
   562  		return rewriteValueLOONG64_OpLsh8x8(v)
   563  	case OpMax32F:
   564  		v.Op = OpLOONG64FMAXF
   565  		return true
   566  	case OpMax64F:
   567  		v.Op = OpLOONG64FMAXD
   568  		return true
   569  	case OpMin32F:
   570  		v.Op = OpLOONG64FMINF
   571  		return true
   572  	case OpMin64F:
   573  		v.Op = OpLOONG64FMIND
   574  		return true
   575  	case OpMod16:
   576  		return rewriteValueLOONG64_OpMod16(v)
   577  	case OpMod16u:
   578  		return rewriteValueLOONG64_OpMod16u(v)
   579  	case OpMod32:
   580  		return rewriteValueLOONG64_OpMod32(v)
   581  	case OpMod32u:
   582  		return rewriteValueLOONG64_OpMod32u(v)
   583  	case OpMod64:
   584  		return rewriteValueLOONG64_OpMod64(v)
   585  	case OpMod64u:
   586  		v.Op = OpLOONG64REMVU
   587  		return true
   588  	case OpMod8:
   589  		return rewriteValueLOONG64_OpMod8(v)
   590  	case OpMod8u:
   591  		return rewriteValueLOONG64_OpMod8u(v)
   592  	case OpMove:
   593  		return rewriteValueLOONG64_OpMove(v)
   594  	case OpMul16:
   595  		v.Op = OpLOONG64MULV
   596  		return true
   597  	case OpMul32:
   598  		v.Op = OpLOONG64MULV
   599  		return true
   600  	case OpMul32F:
   601  		v.Op = OpLOONG64MULF
   602  		return true
   603  	case OpMul64:
   604  		v.Op = OpLOONG64MULV
   605  		return true
   606  	case OpMul64F:
   607  		v.Op = OpLOONG64MULD
   608  		return true
   609  	case OpMul8:
   610  		v.Op = OpLOONG64MULV
   611  		return true
   612  	case OpNeg16:
   613  		v.Op = OpLOONG64NEGV
   614  		return true
   615  	case OpNeg32:
   616  		v.Op = OpLOONG64NEGV
   617  		return true
   618  	case OpNeg32F:
   619  		v.Op = OpLOONG64NEGF
   620  		return true
   621  	case OpNeg64:
   622  		v.Op = OpLOONG64NEGV
   623  		return true
   624  	case OpNeg64F:
   625  		v.Op = OpLOONG64NEGD
   626  		return true
   627  	case OpNeg8:
   628  		v.Op = OpLOONG64NEGV
   629  		return true
   630  	case OpNeq16:
   631  		return rewriteValueLOONG64_OpNeq16(v)
   632  	case OpNeq32:
   633  		return rewriteValueLOONG64_OpNeq32(v)
   634  	case OpNeq32F:
   635  		return rewriteValueLOONG64_OpNeq32F(v)
   636  	case OpNeq64:
   637  		return rewriteValueLOONG64_OpNeq64(v)
   638  	case OpNeq64F:
   639  		return rewriteValueLOONG64_OpNeq64F(v)
   640  	case OpNeq8:
   641  		return rewriteValueLOONG64_OpNeq8(v)
   642  	case OpNeqB:
   643  		v.Op = OpLOONG64XOR
   644  		return true
   645  	case OpNeqPtr:
   646  		return rewriteValueLOONG64_OpNeqPtr(v)
   647  	case OpNilCheck:
   648  		v.Op = OpLOONG64LoweredNilCheck
   649  		return true
   650  	case OpNot:
   651  		return rewriteValueLOONG64_OpNot(v)
   652  	case OpOffPtr:
   653  		return rewriteValueLOONG64_OpOffPtr(v)
   654  	case OpOr16:
   655  		v.Op = OpLOONG64OR
   656  		return true
   657  	case OpOr32:
   658  		v.Op = OpLOONG64OR
   659  		return true
   660  	case OpOr64:
   661  		v.Op = OpLOONG64OR
   662  		return true
   663  	case OpOr8:
   664  		v.Op = OpLOONG64OR
   665  		return true
   666  	case OpOrB:
   667  		v.Op = OpLOONG64OR
   668  		return true
   669  	case OpPanicBounds:
   670  		return rewriteValueLOONG64_OpPanicBounds(v)
   671  	case OpPopCount16:
   672  		return rewriteValueLOONG64_OpPopCount16(v)
   673  	case OpPopCount32:
   674  		return rewriteValueLOONG64_OpPopCount32(v)
   675  	case OpPopCount64:
   676  		return rewriteValueLOONG64_OpPopCount64(v)
   677  	case OpPubBarrier:
   678  		v.Op = OpLOONG64LoweredPubBarrier
   679  		return true
   680  	case OpRotateLeft16:
   681  		return rewriteValueLOONG64_OpRotateLeft16(v)
   682  	case OpRotateLeft32:
   683  		return rewriteValueLOONG64_OpRotateLeft32(v)
   684  	case OpRotateLeft64:
   685  		return rewriteValueLOONG64_OpRotateLeft64(v)
   686  	case OpRotateLeft8:
   687  		return rewriteValueLOONG64_OpRotateLeft8(v)
   688  	case OpRound32F:
   689  		v.Op = OpLOONG64LoweredRound32F
   690  		return true
   691  	case OpRound64F:
   692  		v.Op = OpLOONG64LoweredRound64F
   693  		return true
   694  	case OpRsh16Ux16:
   695  		return rewriteValueLOONG64_OpRsh16Ux16(v)
   696  	case OpRsh16Ux32:
   697  		return rewriteValueLOONG64_OpRsh16Ux32(v)
   698  	case OpRsh16Ux64:
   699  		return rewriteValueLOONG64_OpRsh16Ux64(v)
   700  	case OpRsh16Ux8:
   701  		return rewriteValueLOONG64_OpRsh16Ux8(v)
   702  	case OpRsh16x16:
   703  		return rewriteValueLOONG64_OpRsh16x16(v)
   704  	case OpRsh16x32:
   705  		return rewriteValueLOONG64_OpRsh16x32(v)
   706  	case OpRsh16x64:
   707  		return rewriteValueLOONG64_OpRsh16x64(v)
   708  	case OpRsh16x8:
   709  		return rewriteValueLOONG64_OpRsh16x8(v)
   710  	case OpRsh32Ux16:
   711  		return rewriteValueLOONG64_OpRsh32Ux16(v)
   712  	case OpRsh32Ux32:
   713  		return rewriteValueLOONG64_OpRsh32Ux32(v)
   714  	case OpRsh32Ux64:
   715  		return rewriteValueLOONG64_OpRsh32Ux64(v)
   716  	case OpRsh32Ux8:
   717  		return rewriteValueLOONG64_OpRsh32Ux8(v)
   718  	case OpRsh32x16:
   719  		return rewriteValueLOONG64_OpRsh32x16(v)
   720  	case OpRsh32x32:
   721  		return rewriteValueLOONG64_OpRsh32x32(v)
   722  	case OpRsh32x64:
   723  		return rewriteValueLOONG64_OpRsh32x64(v)
   724  	case OpRsh32x8:
   725  		return rewriteValueLOONG64_OpRsh32x8(v)
   726  	case OpRsh64Ux16:
   727  		return rewriteValueLOONG64_OpRsh64Ux16(v)
   728  	case OpRsh64Ux32:
   729  		return rewriteValueLOONG64_OpRsh64Ux32(v)
   730  	case OpRsh64Ux64:
   731  		return rewriteValueLOONG64_OpRsh64Ux64(v)
   732  	case OpRsh64Ux8:
   733  		return rewriteValueLOONG64_OpRsh64Ux8(v)
   734  	case OpRsh64x16:
   735  		return rewriteValueLOONG64_OpRsh64x16(v)
   736  	case OpRsh64x32:
   737  		return rewriteValueLOONG64_OpRsh64x32(v)
   738  	case OpRsh64x64:
   739  		return rewriteValueLOONG64_OpRsh64x64(v)
   740  	case OpRsh64x8:
   741  		return rewriteValueLOONG64_OpRsh64x8(v)
   742  	case OpRsh8Ux16:
   743  		return rewriteValueLOONG64_OpRsh8Ux16(v)
   744  	case OpRsh8Ux32:
   745  		return rewriteValueLOONG64_OpRsh8Ux32(v)
   746  	case OpRsh8Ux64:
   747  		return rewriteValueLOONG64_OpRsh8Ux64(v)
   748  	case OpRsh8Ux8:
   749  		return rewriteValueLOONG64_OpRsh8Ux8(v)
   750  	case OpRsh8x16:
   751  		return rewriteValueLOONG64_OpRsh8x16(v)
   752  	case OpRsh8x32:
   753  		return rewriteValueLOONG64_OpRsh8x32(v)
   754  	case OpRsh8x64:
   755  		return rewriteValueLOONG64_OpRsh8x64(v)
   756  	case OpRsh8x8:
   757  		return rewriteValueLOONG64_OpRsh8x8(v)
   758  	case OpSelect0:
   759  		return rewriteValueLOONG64_OpSelect0(v)
   760  	case OpSelect1:
   761  		return rewriteValueLOONG64_OpSelect1(v)
   762  	case OpSelectN:
   763  		return rewriteValueLOONG64_OpSelectN(v)
   764  	case OpSignExt16to32:
   765  		v.Op = OpLOONG64MOVHreg
   766  		return true
   767  	case OpSignExt16to64:
   768  		v.Op = OpLOONG64MOVHreg
   769  		return true
   770  	case OpSignExt32to64:
   771  		v.Op = OpLOONG64MOVWreg
   772  		return true
   773  	case OpSignExt8to16:
   774  		v.Op = OpLOONG64MOVBreg
   775  		return true
   776  	case OpSignExt8to32:
   777  		v.Op = OpLOONG64MOVBreg
   778  		return true
   779  	case OpSignExt8to64:
   780  		v.Op = OpLOONG64MOVBreg
   781  		return true
   782  	case OpSlicemask:
   783  		return rewriteValueLOONG64_OpSlicemask(v)
   784  	case OpSqrt:
   785  		v.Op = OpLOONG64SQRTD
   786  		return true
   787  	case OpSqrt32:
   788  		v.Op = OpLOONG64SQRTF
   789  		return true
   790  	case OpStaticCall:
   791  		v.Op = OpLOONG64CALLstatic
   792  		return true
   793  	case OpStore:
   794  		return rewriteValueLOONG64_OpStore(v)
   795  	case OpSub16:
   796  		v.Op = OpLOONG64SUBV
   797  		return true
   798  	case OpSub32:
   799  		v.Op = OpLOONG64SUBV
   800  		return true
   801  	case OpSub32F:
   802  		v.Op = OpLOONG64SUBF
   803  		return true
   804  	case OpSub64:
   805  		v.Op = OpLOONG64SUBV
   806  		return true
   807  	case OpSub64F:
   808  		v.Op = OpLOONG64SUBD
   809  		return true
   810  	case OpSub8:
   811  		v.Op = OpLOONG64SUBV
   812  		return true
   813  	case OpSubPtr:
   814  		v.Op = OpLOONG64SUBV
   815  		return true
   816  	case OpTailCall:
   817  		v.Op = OpLOONG64CALLtail
   818  		return true
   819  	case OpTrunc16to8:
   820  		v.Op = OpCopy
   821  		return true
   822  	case OpTrunc32to16:
   823  		v.Op = OpCopy
   824  		return true
   825  	case OpTrunc32to8:
   826  		v.Op = OpCopy
   827  		return true
   828  	case OpTrunc64to16:
   829  		v.Op = OpCopy
   830  		return true
   831  	case OpTrunc64to32:
   832  		v.Op = OpCopy
   833  		return true
   834  	case OpTrunc64to8:
   835  		v.Op = OpCopy
   836  		return true
   837  	case OpWB:
   838  		v.Op = OpLOONG64LoweredWB
   839  		return true
   840  	case OpXor16:
   841  		v.Op = OpLOONG64XOR
   842  		return true
   843  	case OpXor32:
   844  		v.Op = OpLOONG64XOR
   845  		return true
   846  	case OpXor64:
   847  		v.Op = OpLOONG64XOR
   848  		return true
   849  	case OpXor8:
   850  		v.Op = OpLOONG64XOR
   851  		return true
   852  	case OpZero:
   853  		return rewriteValueLOONG64_OpZero(v)
   854  	case OpZeroExt16to32:
   855  		v.Op = OpLOONG64MOVHUreg
   856  		return true
   857  	case OpZeroExt16to64:
   858  		v.Op = OpLOONG64MOVHUreg
   859  		return true
   860  	case OpZeroExt32to64:
   861  		v.Op = OpLOONG64MOVWUreg
   862  		return true
   863  	case OpZeroExt8to16:
   864  		v.Op = OpLOONG64MOVBUreg
   865  		return true
   866  	case OpZeroExt8to32:
   867  		v.Op = OpLOONG64MOVBUreg
   868  		return true
   869  	case OpZeroExt8to64:
   870  		v.Op = OpLOONG64MOVBUreg
   871  		return true
   872  	}
   873  	return false
   874  }
   875  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   876  	v_0 := v.Args[0]
   877  	// match: (Addr {sym} base)
   878  	// result: (MOVVaddr {sym} base)
   879  	for {
   880  		sym := auxToSym(v.Aux)
   881  		base := v_0
   882  		v.reset(OpLOONG64MOVVaddr)
   883  		v.Aux = symToAux(sym)
   884  		v.AddArg(base)
   885  		return true
   886  	}
   887  }
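// AtomicAnd8 and AtomicOr8 are lowered to 32-bit atomic RMW ops: the address
// is rounded down to a 4-byte boundary (AND with MOVVconst [^3]), and the byte
// operand is shifted into its lane by 8*(ptr&3) bits (the SLLVconst [3] of
// ANDconst [3] ptr). For the AND case the other lanes are filled with ones via
// NORconst [0] (a bitwise NOT), so they pass through unchanged; for the OR
// case the other lanes are zero and are likewise unaffected.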
   888  func rewriteValueLOONG64_OpAtomicAnd8(v *Value) bool {
   889  	v_2 := v.Args[2]
   890  	v_1 := v.Args[1]
   891  	v_0 := v.Args[0]
   892  	b := v.Block
   893  	typ := &b.Func.Config.Types
   894  	// match: (AtomicAnd8 ptr val mem)
   895  	// result: (LoweredAtomicAnd32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (NORconst [0] <typ.UInt32> (SLLV <typ.UInt32> (XORconst <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr)))) mem)
   896  	for {
   897  		ptr := v_0
   898  		val := v_1
   899  		mem := v_2
   900  		v.reset(OpLOONG64LoweredAtomicAnd32)
   901  		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
   902  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   903  		v1.AuxInt = int64ToAuxInt(^3)
   904  		v0.AddArg2(v1, ptr)
   905  		v2 := b.NewValue0(v.Pos, OpLOONG64NORconst, typ.UInt32)
   906  		v2.AuxInt = int64ToAuxInt(0)
   907  		v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
   908  		v4 := b.NewValue0(v.Pos, OpLOONG64XORconst, typ.UInt32)
   909  		v4.AuxInt = int64ToAuxInt(0xff)
   910  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   911  		v5.AddArg(val)
   912  		v4.AddArg(v5)
   913  		v6 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
   914  		v6.AuxInt = int64ToAuxInt(3)
   915  		v7 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
   916  		v7.AuxInt = int64ToAuxInt(3)
   917  		v7.AddArg(ptr)
   918  		v6.AddArg(v7)
   919  		v3.AddArg2(v4, v6)
   920  		v2.AddArg(v3)
   921  		v.AddArg3(v0, v2, mem)
   922  		return true
   923  	}
   924  }
   925  func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
   926  	v_3 := v.Args[3]
   927  	v_2 := v.Args[2]
   928  	v_1 := v.Args[1]
   929  	v_0 := v.Args[0]
   930  	b := v.Block
   931  	typ := &b.Func.Config.Types
   932  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   933  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   934  	for {
   935  		ptr := v_0
   936  		old := v_1
   937  		new := v_2
   938  		mem := v_3
   939  		v.reset(OpLOONG64LoweredAtomicCas32)
   940  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   941  		v0.AddArg(old)
   942  		v.AddArg4(ptr, v0, new, mem)
   943  		return true
   944  	}
   945  }
   946  func rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v *Value) bool {
   947  	v_3 := v.Args[3]
   948  	v_2 := v.Args[2]
   949  	v_1 := v.Args[1]
   950  	v_0 := v.Args[0]
   951  	b := v.Block
   952  	typ := &b.Func.Config.Types
   953  	// match: (AtomicCompareAndSwap32Variant ptr old new mem)
   954  	// result: (LoweredAtomicCas32Variant ptr (SignExt32to64 old) new mem)
   955  	for {
   956  		ptr := v_0
   957  		old := v_1
   958  		new := v_2
   959  		mem := v_3
   960  		v.reset(OpLOONG64LoweredAtomicCas32Variant)
   961  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   962  		v0.AddArg(old)
   963  		v.AddArg4(ptr, v0, new, mem)
   964  		return true
   965  	}
   966  }
   967  func rewriteValueLOONG64_OpAtomicOr8(v *Value) bool {
   968  	v_2 := v.Args[2]
   969  	v_1 := v.Args[1]
   970  	v_0 := v.Args[0]
   971  	b := v.Block
   972  	typ := &b.Func.Config.Types
   973  	// match: (AtomicOr8 ptr val mem)
   974  	// result: (LoweredAtomicOr32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
   975  	for {
   976  		ptr := v_0
   977  		val := v_1
   978  		mem := v_2
   979  		v.reset(OpLOONG64LoweredAtomicOr32)
   980  		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
   981  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
   982  		v1.AuxInt = int64ToAuxInt(^3)
   983  		v0.AddArg2(v1, ptr)
   984  		v2 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
   985  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   986  		v3.AddArg(val)
   987  		v4 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
   988  		v4.AuxInt = int64ToAuxInt(3)
   989  		v5 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
   990  		v5.AuxInt = int64ToAuxInt(3)
   991  		v5.AddArg(ptr)
   992  		v4.AddArg(v5)
   993  		v2.AddArg2(v3, v4)
   994  		v.AddArg3(v0, v2, mem)
   995  		return true
   996  	}
   997  }
   998  func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
   999  	v_1 := v.Args[1]
  1000  	v_0 := v.Args[0]
  1001  	b := v.Block
  1002  	// match: (Avg64u <t> x y)
  1003  	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
  1004  	for {
  1005  		t := v.Type
  1006  		x := v_0
  1007  		y := v_1
  1008  		v.reset(OpLOONG64ADDV)
  1009  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
  1010  		v0.AuxInt = int64ToAuxInt(1)
  1011  		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
  1012  		v1.AddArg2(x, y)
  1013  		v0.AddArg(v1)
  1014  		v.AddArg2(v0, y)
  1015  		return true
  1016  	}
  1017  }
  1018  func rewriteValueLOONG64_OpBitLen16(v *Value) bool {
  1019  	v_0 := v.Args[0]
  1020  	b := v.Block
  1021  	typ := &b.Func.Config.Types
  1022  	// match: (BitLen16 x)
  1023  	// result: (BitLen64 (ZeroExt16to64 x))
  1024  	for {
  1025  		x := v_0
  1026  		v.reset(OpBitLen64)
  1027  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1028  		v0.AddArg(x)
  1029  		v.AddArg(v0)
  1030  		return true
  1031  	}
  1032  }
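// BitLen32 and BitLen64 compute width-minus-leading-zeros. Since
// SUBVconst [c] x is x-c, the result is written as NEGV(CLZ(x) - width),
// which equals width - CLZ(x).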
  1033  func rewriteValueLOONG64_OpBitLen32(v *Value) bool {
  1034  	v_0 := v.Args[0]
  1035  	b := v.Block
  1036  	// match: (BitLen32 <t> x)
  1037  	// result: (NEGV <t> (SUBVconst <t> [32] (CLZW <t> x)))
  1038  	for {
  1039  		t := v.Type
  1040  		x := v_0
  1041  		v.reset(OpLOONG64NEGV)
  1042  		v.Type = t
  1043  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
  1044  		v0.AuxInt = int64ToAuxInt(32)
  1045  		v1 := b.NewValue0(v.Pos, OpLOONG64CLZW, t)
  1046  		v1.AddArg(x)
  1047  		v0.AddArg(v1)
  1048  		v.AddArg(v0)
  1049  		return true
  1050  	}
  1051  }
  1052  func rewriteValueLOONG64_OpBitLen64(v *Value) bool {
  1053  	v_0 := v.Args[0]
  1054  	b := v.Block
  1055  	// match: (BitLen64 <t> x)
  1056  	// result: (NEGV <t> (SUBVconst <t> [64] (CLZV <t> x)))
  1057  	for {
  1058  		t := v.Type
  1059  		x := v_0
  1060  		v.reset(OpLOONG64NEGV)
  1061  		v.Type = t
  1062  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
  1063  		v0.AuxInt = int64ToAuxInt(64)
  1064  		v1 := b.NewValue0(v.Pos, OpLOONG64CLZV, t)
  1065  		v1.AddArg(x)
  1066  		v0.AddArg(v1)
  1067  		v.AddArg(v0)
  1068  		return true
  1069  	}
  1070  }
  1071  func rewriteValueLOONG64_OpBitLen8(v *Value) bool {
  1072  	v_0 := v.Args[0]
  1073  	b := v.Block
  1074  	typ := &b.Func.Config.Types
  1075  	// match: (BitLen8 x)
  1076  	// result: (BitLen64 (ZeroExt8to64 x))
  1077  	for {
  1078  		x := v_0
  1079  		v.reset(OpBitLen64)
  1080  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1081  		v0.AddArg(x)
  1082  		v.AddArg(v0)
  1083  		return true
  1084  	}
  1085  }
  1086  func rewriteValueLOONG64_OpBitRev16(v *Value) bool {
  1087  	v_0 := v.Args[0]
  1088  	b := v.Block
  1089  	// match: (BitRev16 <t> x)
  1090  	// result: (REVB2H (BITREV4B <t> x))
  1091  	for {
  1092  		t := v.Type
  1093  		x := v_0
  1094  		v.reset(OpLOONG64REVB2H)
  1095  		v0 := b.NewValue0(v.Pos, OpLOONG64BITREV4B, t)
  1096  		v0.AddArg(x)
  1097  		v.AddArg(v0)
  1098  		return true
  1099  	}
  1100  }
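// Com* (bitwise complement) is lowered to NOR with a zero constant, since
// NOR(0, x) == ^x.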
  1101  func rewriteValueLOONG64_OpCom16(v *Value) bool {
  1102  	v_0 := v.Args[0]
  1103  	b := v.Block
  1104  	typ := &b.Func.Config.Types
  1105  	// match: (Com16 x)
  1106  	// result: (NOR (MOVVconst [0]) x)
  1107  	for {
  1108  		x := v_0
  1109  		v.reset(OpLOONG64NOR)
  1110  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1111  		v0.AuxInt = int64ToAuxInt(0)
  1112  		v.AddArg2(v0, x)
  1113  		return true
  1114  	}
  1115  }
  1116  func rewriteValueLOONG64_OpCom32(v *Value) bool {
  1117  	v_0 := v.Args[0]
  1118  	b := v.Block
  1119  	typ := &b.Func.Config.Types
  1120  	// match: (Com32 x)
  1121  	// result: (NOR (MOVVconst [0]) x)
  1122  	for {
  1123  		x := v_0
  1124  		v.reset(OpLOONG64NOR)
  1125  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1126  		v0.AuxInt = int64ToAuxInt(0)
  1127  		v.AddArg2(v0, x)
  1128  		return true
  1129  	}
  1130  }
  1131  func rewriteValueLOONG64_OpCom64(v *Value) bool {
  1132  	v_0 := v.Args[0]
  1133  	b := v.Block
  1134  	typ := &b.Func.Config.Types
  1135  	// match: (Com64 x)
  1136  	// result: (NOR (MOVVconst [0]) x)
  1137  	for {
  1138  		x := v_0
  1139  		v.reset(OpLOONG64NOR)
  1140  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1141  		v0.AuxInt = int64ToAuxInt(0)
  1142  		v.AddArg2(v0, x)
  1143  		return true
  1144  	}
  1145  }
  1146  func rewriteValueLOONG64_OpCom8(v *Value) bool {
  1147  	v_0 := v.Args[0]
  1148  	b := v.Block
  1149  	typ := &b.Func.Config.Types
  1150  	// match: (Com8 x)
  1151  	// result: (NOR (MOVVconst [0]) x)
  1152  	for {
  1153  		x := v_0
  1154  		v.reset(OpLOONG64NOR)
  1155  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1156  		v0.AuxInt = int64ToAuxInt(0)
  1157  		v.AddArg2(v0, x)
  1158  		return true
  1159  	}
  1160  }
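// CondSelect is lowered branchlessly: MASKEQZ <t> x cond yields x when cond is
// nonzero and 0 otherwise, MASKNEZ <t> y cond yields y only when cond is zero,
// and the OR of the two picks the selected value.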
  1161  func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
  1162  	v_2 := v.Args[2]
  1163  	v_1 := v.Args[1]
  1164  	v_0 := v.Args[0]
  1165  	b := v.Block
  1166  	// match: (CondSelect <t> x y cond)
  1167  	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
  1168  	for {
  1169  		t := v.Type
  1170  		x := v_0
  1171  		y := v_1
  1172  		cond := v_2
  1173  		v.reset(OpLOONG64OR)
  1174  		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
  1175  		v0.AddArg2(x, cond)
  1176  		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
  1177  		v1.AddArg2(y, cond)
  1178  		v.AddArg2(v0, v1)
  1179  		return true
  1180  	}
  1181  }
  1182  func rewriteValueLOONG64_OpConst16(v *Value) bool {
  1183  	// match: (Const16 [val])
  1184  	// result: (MOVVconst [int64(val)])
  1185  	for {
  1186  		val := auxIntToInt16(v.AuxInt)
  1187  		v.reset(OpLOONG64MOVVconst)
  1188  		v.AuxInt = int64ToAuxInt(int64(val))
  1189  		return true
  1190  	}
  1191  }
  1192  func rewriteValueLOONG64_OpConst32(v *Value) bool {
  1193  	// match: (Const32 [val])
  1194  	// result: (MOVVconst [int64(val)])
  1195  	for {
  1196  		val := auxIntToInt32(v.AuxInt)
  1197  		v.reset(OpLOONG64MOVVconst)
  1198  		v.AuxInt = int64ToAuxInt(int64(val))
  1199  		return true
  1200  	}
  1201  }
  1202  func rewriteValueLOONG64_OpConst32F(v *Value) bool {
  1203  	// match: (Const32F [val])
  1204  	// result: (MOVFconst [float64(val)])
  1205  	for {
  1206  		val := auxIntToFloat32(v.AuxInt)
  1207  		v.reset(OpLOONG64MOVFconst)
  1208  		v.AuxInt = float64ToAuxInt(float64(val))
  1209  		return true
  1210  	}
  1211  }
  1212  func rewriteValueLOONG64_OpConst64(v *Value) bool {
  1213  	// match: (Const64 [val])
  1214  	// result: (MOVVconst [int64(val)])
  1215  	for {
  1216  		val := auxIntToInt64(v.AuxInt)
  1217  		v.reset(OpLOONG64MOVVconst)
  1218  		v.AuxInt = int64ToAuxInt(int64(val))
  1219  		return true
  1220  	}
  1221  }
  1222  func rewriteValueLOONG64_OpConst64F(v *Value) bool {
  1223  	// match: (Const64F [val])
  1224  	// result: (MOVDconst [float64(val)])
  1225  	for {
  1226  		val := auxIntToFloat64(v.AuxInt)
  1227  		v.reset(OpLOONG64MOVDconst)
  1228  		v.AuxInt = float64ToAuxInt(float64(val))
  1229  		return true
  1230  	}
  1231  }
  1232  func rewriteValueLOONG64_OpConst8(v *Value) bool {
  1233  	// match: (Const8 [val])
  1234  	// result: (MOVVconst [int64(val)])
  1235  	for {
  1236  		val := auxIntToInt8(v.AuxInt)
  1237  		v.reset(OpLOONG64MOVVconst)
  1238  		v.AuxInt = int64ToAuxInt(int64(val))
  1239  		return true
  1240  	}
  1241  }
  1242  func rewriteValueLOONG64_OpConstBool(v *Value) bool {
  1243  	// match: (ConstBool [t])
  1244  	// result: (MOVVconst [int64(b2i(t))])
  1245  	for {
  1246  		t := auxIntToBool(v.AuxInt)
  1247  		v.reset(OpLOONG64MOVVconst)
  1248  		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
  1249  		return true
  1250  	}
  1251  }
  1252  func rewriteValueLOONG64_OpConstNil(v *Value) bool {
  1253  	// match: (ConstNil)
  1254  	// result: (MOVVconst [0])
  1255  	for {
  1256  		v.reset(OpLOONG64MOVVconst)
  1257  		v.AuxInt = int64ToAuxInt(0)
  1258  		return true
  1259  	}
  1260  }
  1261  func rewriteValueLOONG64_OpCtz16(v *Value) bool {
  1262  	v_0 := v.Args[0]
  1263  	b := v.Block
  1264  	typ := &b.Func.Config.Types
  1265  	// match: (Ctz16 x)
  1266  	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<16])))
  1267  	for {
  1268  		x := v_0
  1269  		v.reset(OpLOONG64CTZV)
  1270  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
  1271  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1272  		v1.AuxInt = int64ToAuxInt(1 << 16)
  1273  		v0.AddArg2(x, v1)
  1274  		v.AddArg(v0)
  1275  		return true
  1276  	}
  1277  }
  1278  func rewriteValueLOONG64_OpCtz8(v *Value) bool {
  1279  	v_0 := v.Args[0]
  1280  	b := v.Block
  1281  	typ := &b.Func.Config.Types
  1282  	// match: (Ctz8 x)
  1283  	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<8])))
  1284  	for {
  1285  		x := v_0
  1286  		v.reset(OpLOONG64CTZV)
  1287  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
  1288  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1289  		v1.AuxInt = int64ToAuxInt(1 << 8)
  1290  		v0.AddArg2(x, v1)
  1291  		v.AddArg(v0)
  1292  		return true
  1293  	}
  1294  }
  1295  func rewriteValueLOONG64_OpDiv16(v *Value) bool {
  1296  	v_1 := v.Args[1]
  1297  	v_0 := v.Args[0]
  1298  	b := v.Block
  1299  	typ := &b.Func.Config.Types
  1300  	// match: (Div16 x y)
  1301  	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
  1302  	for {
  1303  		x := v_0
  1304  		y := v_1
  1305  		v.reset(OpLOONG64DIVV)
  1306  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1307  		v0.AddArg(x)
  1308  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1309  		v1.AddArg(y)
  1310  		v.AddArg2(v0, v1)
  1311  		return true
  1312  	}
  1313  }
  1314  func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
  1315  	v_1 := v.Args[1]
  1316  	v_0 := v.Args[0]
  1317  	b := v.Block
  1318  	typ := &b.Func.Config.Types
  1319  	// match: (Div16u x y)
  1320  	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
  1321  	for {
  1322  		x := v_0
  1323  		y := v_1
  1324  		v.reset(OpLOONG64DIVVU)
  1325  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1326  		v0.AddArg(x)
  1327  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1328  		v1.AddArg(y)
  1329  		v.AddArg2(v0, v1)
  1330  		return true
  1331  	}
  1332  }
  1333  func rewriteValueLOONG64_OpDiv32(v *Value) bool {
  1334  	v_1 := v.Args[1]
  1335  	v_0 := v.Args[0]
  1336  	b := v.Block
  1337  	typ := &b.Func.Config.Types
  1338  	// match: (Div32 x y)
  1339  	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
  1340  	for {
  1341  		x := v_0
  1342  		y := v_1
  1343  		v.reset(OpLOONG64DIVV)
  1344  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1345  		v0.AddArg(x)
  1346  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1347  		v1.AddArg(y)
  1348  		v.AddArg2(v0, v1)
  1349  		return true
  1350  	}
  1351  }
  1352  func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
  1353  	v_1 := v.Args[1]
  1354  	v_0 := v.Args[0]
  1355  	b := v.Block
  1356  	typ := &b.Func.Config.Types
  1357  	// match: (Div32u x y)
  1358  	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
  1359  	for {
  1360  		x := v_0
  1361  		y := v_1
  1362  		v.reset(OpLOONG64DIVVU)
  1363  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1364  		v0.AddArg(x)
  1365  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1366  		v1.AddArg(y)
  1367  		v.AddArg2(v0, v1)
  1368  		return true
  1369  	}
  1370  }
  1371  func rewriteValueLOONG64_OpDiv64(v *Value) bool {
  1372  	v_1 := v.Args[1]
  1373  	v_0 := v.Args[0]
  1374  	// match: (Div64 x y)
  1375  	// result: (DIVV x y)
  1376  	for {
  1377  		x := v_0
  1378  		y := v_1
  1379  		v.reset(OpLOONG64DIVV)
  1380  		v.AddArg2(x, y)
  1381  		return true
  1382  	}
  1383  }
  1384  func rewriteValueLOONG64_OpDiv8(v *Value) bool {
  1385  	v_1 := v.Args[1]
  1386  	v_0 := v.Args[0]
  1387  	b := v.Block
  1388  	typ := &b.Func.Config.Types
  1389  	// match: (Div8 x y)
  1390  	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
  1391  	for {
  1392  		x := v_0
  1393  		y := v_1
  1394  		v.reset(OpLOONG64DIVV)
  1395  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1396  		v0.AddArg(x)
  1397  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  1398  		v1.AddArg(y)
  1399  		v.AddArg2(v0, v1)
  1400  		return true
  1401  	}
  1402  }
  1403  func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
  1404  	v_1 := v.Args[1]
  1405  	v_0 := v.Args[0]
  1406  	b := v.Block
  1407  	typ := &b.Func.Config.Types
  1408  	// match: (Div8u x y)
  1409  	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
  1410  	for {
  1411  		x := v_0
  1412  		y := v_1
  1413  		v.reset(OpLOONG64DIVVU)
  1414  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1415  		v0.AddArg(x)
  1416  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1417  		v1.AddArg(y)
  1418  		v.AddArg2(v0, v1)
  1419  		return true
  1420  	}
  1421  }
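// The Eq*/EqPtr rules use the idiom "x == y iff (x ^ y) <u 1": the operands
// are XORed (after zero-extension for the narrow widths) and the result is
// compared against the constant 1 with SGTU, which yields 1 exactly when the
// XOR is 0.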
  1422  func rewriteValueLOONG64_OpEq16(v *Value) bool {
  1423  	v_1 := v.Args[1]
  1424  	v_0 := v.Args[0]
  1425  	b := v.Block
  1426  	typ := &b.Func.Config.Types
  1427  	// match: (Eq16 x y)
  1428  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
  1429  	for {
  1430  		x := v_0
  1431  		y := v_1
  1432  		v.reset(OpLOONG64SGTU)
  1433  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1434  		v0.AuxInt = int64ToAuxInt(1)
  1435  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1436  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1437  		v2.AddArg(x)
  1438  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1439  		v3.AddArg(y)
  1440  		v1.AddArg2(v2, v3)
  1441  		v.AddArg2(v0, v1)
  1442  		return true
  1443  	}
  1444  }
  1445  func rewriteValueLOONG64_OpEq32(v *Value) bool {
  1446  	v_1 := v.Args[1]
  1447  	v_0 := v.Args[0]
  1448  	b := v.Block
  1449  	typ := &b.Func.Config.Types
  1450  	// match: (Eq32 x y)
  1451  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
  1452  	for {
  1453  		x := v_0
  1454  		y := v_1
  1455  		v.reset(OpLOONG64SGTU)
  1456  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1457  		v0.AuxInt = int64ToAuxInt(1)
  1458  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1459  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1460  		v2.AddArg(x)
  1461  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1462  		v3.AddArg(y)
  1463  		v1.AddArg2(v2, v3)
  1464  		v.AddArg2(v0, v1)
  1465  		return true
  1466  	}
  1467  }
  1468  func rewriteValueLOONG64_OpEq32F(v *Value) bool {
  1469  	v_1 := v.Args[1]
  1470  	v_0 := v.Args[0]
  1471  	b := v.Block
  1472  	// match: (Eq32F x y)
  1473  	// result: (FPFlagTrue (CMPEQF x y))
  1474  	for {
  1475  		x := v_0
  1476  		y := v_1
  1477  		v.reset(OpLOONG64FPFlagTrue)
  1478  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
  1479  		v0.AddArg2(x, y)
  1480  		v.AddArg(v0)
  1481  		return true
  1482  	}
  1483  }
  1484  func rewriteValueLOONG64_OpEq64(v *Value) bool {
  1485  	v_1 := v.Args[1]
  1486  	v_0 := v.Args[0]
  1487  	b := v.Block
  1488  	typ := &b.Func.Config.Types
  1489  	// match: (Eq64 x y)
  1490  	// result: (SGTU (MOVVconst [1]) (XOR x y))
  1491  	for {
  1492  		x := v_0
  1493  		y := v_1
  1494  		v.reset(OpLOONG64SGTU)
  1495  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1496  		v0.AuxInt = int64ToAuxInt(1)
  1497  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1498  		v1.AddArg2(x, y)
  1499  		v.AddArg2(v0, v1)
  1500  		return true
  1501  	}
  1502  }
  1503  func rewriteValueLOONG64_OpEq64F(v *Value) bool {
  1504  	v_1 := v.Args[1]
  1505  	v_0 := v.Args[0]
  1506  	b := v.Block
  1507  	// match: (Eq64F x y)
  1508  	// result: (FPFlagTrue (CMPEQD x y))
  1509  	for {
  1510  		x := v_0
  1511  		y := v_1
  1512  		v.reset(OpLOONG64FPFlagTrue)
  1513  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
  1514  		v0.AddArg2(x, y)
  1515  		v.AddArg(v0)
  1516  		return true
  1517  	}
  1518  }
  1519  func rewriteValueLOONG64_OpEq8(v *Value) bool {
  1520  	v_1 := v.Args[1]
  1521  	v_0 := v.Args[0]
  1522  	b := v.Block
  1523  	typ := &b.Func.Config.Types
  1524  	// match: (Eq8 x y)
  1525  	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
  1526  	for {
  1527  		x := v_0
  1528  		y := v_1
  1529  		v.reset(OpLOONG64SGTU)
  1530  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1531  		v0.AuxInt = int64ToAuxInt(1)
  1532  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1533  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1534  		v2.AddArg(x)
  1535  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  1536  		v3.AddArg(y)
  1537  		v1.AddArg2(v2, v3)
  1538  		v.AddArg2(v0, v1)
  1539  		return true
  1540  	}
  1541  }
  1542  func rewriteValueLOONG64_OpEqB(v *Value) bool {
  1543  	v_1 := v.Args[1]
  1544  	v_0 := v.Args[0]
  1545  	b := v.Block
  1546  	typ := &b.Func.Config.Types
  1547  	// match: (EqB x y)
  1548  	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
  1549  	for {
  1550  		x := v_0
  1551  		y := v_1
  1552  		v.reset(OpLOONG64XOR)
  1553  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1554  		v0.AuxInt = int64ToAuxInt(1)
  1555  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
  1556  		v1.AddArg2(x, y)
  1557  		v.AddArg2(v0, v1)
  1558  		return true
  1559  	}
  1560  }
  1561  func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
  1562  	v_1 := v.Args[1]
  1563  	v_0 := v.Args[0]
  1564  	b := v.Block
  1565  	typ := &b.Func.Config.Types
  1566  	// match: (EqPtr x y)
  1567  	// result: (SGTU (MOVVconst [1]) (XOR x y))
  1568  	for {
  1569  		x := v_0
  1570  		y := v_1
  1571  		v.reset(OpLOONG64SGTU)
  1572  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1573  		v0.AuxInt = int64ToAuxInt(1)
  1574  		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  1575  		v1.AddArg2(x, y)
  1576  		v.AddArg2(v0, v1)
  1577  		return true
  1578  	}
  1579  }
  1580  func rewriteValueLOONG64_OpHmul32(v *Value) bool {
  1581  	v_1 := v.Args[1]
  1582  	v_0 := v.Args[0]
  1583  	b := v.Block
  1584  	typ := &b.Func.Config.Types
  1585  	// match: (Hmul32 x y)
  1586  	// result: (SRAVconst (MULV (SignExt32to64 x) (SignExt32to64 y)) [32])
  1587  	for {
  1588  		x := v_0
  1589  		y := v_1
  1590  		v.reset(OpLOONG64SRAVconst)
  1591  		v.AuxInt = int64ToAuxInt(32)
  1592  		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
  1593  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1594  		v1.AddArg(x)
  1595  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1596  		v2.AddArg(y)
  1597  		v0.AddArg2(v1, v2)
  1598  		v.AddArg(v0)
  1599  		return true
  1600  	}
  1601  }
  1602  func rewriteValueLOONG64_OpHmul32u(v *Value) bool {
  1603  	v_1 := v.Args[1]
  1604  	v_0 := v.Args[0]
  1605  	b := v.Block
  1606  	typ := &b.Func.Config.Types
  1607  	// match: (Hmul32u x y)
  1608  	// result: (SRLVconst (MULV (ZeroExt32to64 x) (ZeroExt32to64 y)) [32])
  1609  	for {
  1610  		x := v_0
  1611  		y := v_1
  1612  		v.reset(OpLOONG64SRLVconst)
  1613  		v.AuxInt = int64ToAuxInt(32)
  1614  		v0 := b.NewValue0(v.Pos, OpLOONG64MULV, typ.Int64)
  1615  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1616  		v1.AddArg(x)
  1617  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1618  		v2.AddArg(y)
  1619  		v0.AddArg2(v1, v2)
  1620  		v.AddArg(v0)
  1621  		return true
  1622  	}
  1623  }
  1624  func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
  1625  	v_1 := v.Args[1]
  1626  	v_0 := v.Args[0]
  1627  	// match: (IsInBounds idx len)
  1628  	// result: (SGTU len idx)
  1629  	for {
  1630  		idx := v_0
  1631  		len := v_1
  1632  		v.reset(OpLOONG64SGTU)
  1633  		v.AddArg2(len, idx)
  1634  		return true
  1635  	}
  1636  }
  1637  func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
  1638  	v_0 := v.Args[0]
  1639  	b := v.Block
  1640  	typ := &b.Func.Config.Types
  1641  	// match: (IsNonNil ptr)
  1642  	// result: (SGTU ptr (MOVVconst [0]))
  1643  	for {
  1644  		ptr := v_0
  1645  		v.reset(OpLOONG64SGTU)
  1646  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1647  		v0.AuxInt = int64ToAuxInt(0)
  1648  		v.AddArg2(ptr, v0)
  1649  		return true
  1650  	}
  1651  }
  1652  func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
  1653  	v_1 := v.Args[1]
  1654  	v_0 := v.Args[0]
  1655  	b := v.Block
  1656  	typ := &b.Func.Config.Types
  1657  	// match: (IsSliceInBounds idx len)
  1658  	// result: (XOR (MOVVconst [1]) (SGTU idx len))
  1659  	for {
  1660  		idx := v_0
  1661  		len := v_1
  1662  		v.reset(OpLOONG64XOR)
  1663  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  1664  		v0.AuxInt = int64ToAuxInt(1)
  1665  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  1666  		v1.AddArg2(idx, len)
  1667  		v.AddArg2(v0, v1)
  1668  		return true
  1669  	}
  1670  }
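// For commutative ops the generator emits a two-iteration loop
// (for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0) that retries the
// match with the two arguments swapped. The z.Block.Func.useFMA(v) condition
// gates fusing a multiply with an add or subtract into a single fused
// multiply-add, which changes rounding and so is only applied where the
// function allows it.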
  1671  func rewriteValueLOONG64_OpLOONG64ADDD(v *Value) bool {
  1672  	v_1 := v.Args[1]
  1673  	v_0 := v.Args[0]
  1674  	// match: (ADDD (MULD x y) z)
  1675  	// cond: z.Block.Func.useFMA(v)
  1676  	// result: (FMADDD x y z)
  1677  	for {
  1678  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1679  			if v_0.Op != OpLOONG64MULD {
  1680  				continue
  1681  			}
  1682  			y := v_0.Args[1]
  1683  			x := v_0.Args[0]
  1684  			z := v_1
  1685  			if !(z.Block.Func.useFMA(v)) {
  1686  				continue
  1687  			}
  1688  			v.reset(OpLOONG64FMADDD)
  1689  			v.AddArg3(x, y, z)
  1690  			return true
  1691  		}
  1692  		break
  1693  	}
  1694  	// match: (ADDD z (NEGD (MULD x y)))
  1695  	// cond: z.Block.Func.useFMA(v)
  1696  	// result: (FNMSUBD x y z)
  1697  	for {
  1698  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1699  			z := v_0
  1700  			if v_1.Op != OpLOONG64NEGD {
  1701  				continue
  1702  			}
  1703  			v_1_0 := v_1.Args[0]
  1704  			if v_1_0.Op != OpLOONG64MULD {
  1705  				continue
  1706  			}
  1707  			y := v_1_0.Args[1]
  1708  			x := v_1_0.Args[0]
  1709  			if !(z.Block.Func.useFMA(v)) {
  1710  				continue
  1711  			}
  1712  			v.reset(OpLOONG64FNMSUBD)
  1713  			v.AddArg3(x, y, z)
  1714  			return true
  1715  		}
  1716  		break
  1717  	}
  1718  	return false
  1719  }
  1720  func rewriteValueLOONG64_OpLOONG64ADDF(v *Value) bool {
  1721  	v_1 := v.Args[1]
  1722  	v_0 := v.Args[0]
  1723  	// match: (ADDF (MULF x y) z)
  1724  	// cond: z.Block.Func.useFMA(v)
  1725  	// result: (FMADDF x y z)
  1726  	for {
  1727  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1728  			if v_0.Op != OpLOONG64MULF {
  1729  				continue
  1730  			}
  1731  			y := v_0.Args[1]
  1732  			x := v_0.Args[0]
  1733  			z := v_1
  1734  			if !(z.Block.Func.useFMA(v)) {
  1735  				continue
  1736  			}
  1737  			v.reset(OpLOONG64FMADDF)
  1738  			v.AddArg3(x, y, z)
  1739  			return true
  1740  		}
  1741  		break
  1742  	}
  1743  	// match: (ADDF z (NEGF (MULF x y)))
  1744  	// cond: z.Block.Func.useFMA(v)
  1745  	// result: (FNMSUBF x y z)
  1746  	for {
  1747  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1748  			z := v_0
  1749  			if v_1.Op != OpLOONG64NEGF {
  1750  				continue
  1751  			}
  1752  			v_1_0 := v_1.Args[0]
  1753  			if v_1_0.Op != OpLOONG64MULF {
  1754  				continue
  1755  			}
  1756  			y := v_1_0.Args[1]
  1757  			x := v_1_0.Args[0]
  1758  			if !(z.Block.Func.useFMA(v)) {
  1759  				continue
  1760  			}
  1761  			v.reset(OpLOONG64FNMSUBF)
  1762  			v.AddArg3(x, y, z)
  1763  			return true
  1764  		}
  1765  		break
  1766  	}
  1767  	return false
  1768  }
  1769  func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
  1770  	v_1 := v.Args[1]
  1771  	v_0 := v.Args[0]
  1772  	// match: (ADDV x (MOVVconst <t> [c]))
  1773  	// cond: is32Bit(c) && !t.IsPtr()
  1774  	// result: (ADDVconst [c] x)
  1775  	for {
  1776  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1777  			x := v_0
  1778  			if v_1.Op != OpLOONG64MOVVconst {
  1779  				continue
  1780  			}
  1781  			t := v_1.Type
  1782  			c := auxIntToInt64(v_1.AuxInt)
  1783  			if !(is32Bit(c) && !t.IsPtr()) {
  1784  				continue
  1785  			}
  1786  			v.reset(OpLOONG64ADDVconst)
  1787  			v.AuxInt = int64ToAuxInt(c)
  1788  			v.AddArg(x)
  1789  			return true
  1790  		}
  1791  		break
  1792  	}
  1793  	// match: (ADDV x (NEGV y))
  1794  	// result: (SUBV x y)
  1795  	for {
  1796  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1797  			x := v_0
  1798  			if v_1.Op != OpLOONG64NEGV {
  1799  				continue
  1800  			}
  1801  			y := v_1.Args[0]
  1802  			v.reset(OpLOONG64SUBV)
  1803  			v.AddArg2(x, y)
  1804  			return true
  1805  		}
  1806  		break
  1807  	}
  1808  	return false
  1809  }
  1810  func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
  1811  	v_0 := v.Args[0]
  1812  	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
  1813  	// cond: is32Bit(off1+int64(off2))
  1814  	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
  1815  	for {
  1816  		off1 := auxIntToInt64(v.AuxInt)
  1817  		if v_0.Op != OpLOONG64MOVVaddr {
  1818  			break
  1819  		}
  1820  		off2 := auxIntToInt32(v_0.AuxInt)
  1821  		sym := auxToSym(v_0.Aux)
  1822  		ptr := v_0.Args[0]
  1823  		if !(is32Bit(off1 + int64(off2))) {
  1824  			break
  1825  		}
  1826  		v.reset(OpLOONG64MOVVaddr)
  1827  		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
  1828  		v.Aux = symToAux(sym)
  1829  		v.AddArg(ptr)
  1830  		return true
  1831  	}
  1832  	// match: (ADDVconst [0] x)
  1833  	// result: x
  1834  	for {
  1835  		if auxIntToInt64(v.AuxInt) != 0 {
  1836  			break
  1837  		}
  1838  		x := v_0
  1839  		v.copyOf(x)
  1840  		return true
  1841  	}
  1842  	// match: (ADDVconst [c] (MOVVconst [d]))
  1843  	// result: (MOVVconst [c+d])
  1844  	for {
  1845  		c := auxIntToInt64(v.AuxInt)
  1846  		if v_0.Op != OpLOONG64MOVVconst {
  1847  			break
  1848  		}
  1849  		d := auxIntToInt64(v_0.AuxInt)
  1850  		v.reset(OpLOONG64MOVVconst)
  1851  		v.AuxInt = int64ToAuxInt(c + d)
  1852  		return true
  1853  	}
  1854  	// match: (ADDVconst [c] (ADDVconst [d] x))
  1855  	// cond: is32Bit(c+d)
  1856  	// result: (ADDVconst [c+d] x)
  1857  	for {
  1858  		c := auxIntToInt64(v.AuxInt)
  1859  		if v_0.Op != OpLOONG64ADDVconst {
  1860  			break
  1861  		}
  1862  		d := auxIntToInt64(v_0.AuxInt)
  1863  		x := v_0.Args[0]
  1864  		if !(is32Bit(c + d)) {
  1865  			break
  1866  		}
  1867  		v.reset(OpLOONG64ADDVconst)
  1868  		v.AuxInt = int64ToAuxInt(c + d)
  1869  		v.AddArg(x)
  1870  		return true
  1871  	}
  1872  	// match: (ADDVconst [c] (SUBVconst [d] x))
  1873  	// cond: is32Bit(c-d)
  1874  	// result: (ADDVconst [c-d] x)
  1875  	for {
  1876  		c := auxIntToInt64(v.AuxInt)
  1877  		if v_0.Op != OpLOONG64SUBVconst {
  1878  			break
  1879  		}
  1880  		d := auxIntToInt64(v_0.AuxInt)
  1881  		x := v_0.Args[0]
  1882  		if !(is32Bit(c - d)) {
  1883  			break
  1884  		}
  1885  		v.reset(OpLOONG64ADDVconst)
  1886  		v.AuxInt = int64ToAuxInt(c - d)
  1887  		v.AddArg(x)
  1888  		return true
  1889  	}
  1890  	return false
  1891  }
  1892  func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
  1893  	v_1 := v.Args[1]
  1894  	v_0 := v.Args[0]
  1895  	// match: (AND x (MOVVconst [c]))
  1896  	// cond: is32Bit(c)
  1897  	// result: (ANDconst [c] x)
  1898  	for {
  1899  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1900  			x := v_0
  1901  			if v_1.Op != OpLOONG64MOVVconst {
  1902  				continue
  1903  			}
  1904  			c := auxIntToInt64(v_1.AuxInt)
  1905  			if !(is32Bit(c)) {
  1906  				continue
  1907  			}
  1908  			v.reset(OpLOONG64ANDconst)
  1909  			v.AuxInt = int64ToAuxInt(c)
  1910  			v.AddArg(x)
  1911  			return true
  1912  		}
  1913  		break
  1914  	}
  1915  	// match: (AND x x)
  1916  	// result: x
  1917  	for {
  1918  		x := v_0
  1919  		if x != v_1 {
  1920  			break
  1921  		}
  1922  		v.copyOf(x)
  1923  		return true
  1924  	}
  1925  	return false
  1926  }
  1927  func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
  1928  	v_0 := v.Args[0]
  1929  	// match: (ANDconst [0] _)
  1930  	// result: (MOVVconst [0])
  1931  	for {
  1932  		if auxIntToInt64(v.AuxInt) != 0 {
  1933  			break
  1934  		}
  1935  		v.reset(OpLOONG64MOVVconst)
  1936  		v.AuxInt = int64ToAuxInt(0)
  1937  		return true
  1938  	}
  1939  	// match: (ANDconst [-1] x)
  1940  	// result: x
  1941  	for {
  1942  		if auxIntToInt64(v.AuxInt) != -1 {
  1943  			break
  1944  		}
  1945  		x := v_0
  1946  		v.copyOf(x)
  1947  		return true
  1948  	}
  1949  	// match: (ANDconst [c] (MOVVconst [d]))
  1950  	// result: (MOVVconst [c&d])
  1951  	for {
  1952  		c := auxIntToInt64(v.AuxInt)
  1953  		if v_0.Op != OpLOONG64MOVVconst {
  1954  			break
  1955  		}
  1956  		d := auxIntToInt64(v_0.AuxInt)
  1957  		v.reset(OpLOONG64MOVVconst)
  1958  		v.AuxInt = int64ToAuxInt(c & d)
  1959  		return true
  1960  	}
  1961  	// match: (ANDconst [c] (ANDconst [d] x))
  1962  	// result: (ANDconst [c&d] x)
  1963  	for {
  1964  		c := auxIntToInt64(v.AuxInt)
  1965  		if v_0.Op != OpLOONG64ANDconst {
  1966  			break
  1967  		}
  1968  		d := auxIntToInt64(v_0.AuxInt)
  1969  		x := v_0.Args[0]
  1970  		v.reset(OpLOONG64ANDconst)
  1971  		v.AuxInt = int64ToAuxInt(c & d)
  1972  		v.AddArg(x)
  1973  		return true
  1974  	}
  1975  	return false
  1976  }
  1977  func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
  1978  	v_1 := v.Args[1]
  1979  	v_0 := v.Args[0]
  1980  	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
  1981  	// cond: d != 0
  1982  	// result: (MOVVconst [c/d])
  1983  	for {
  1984  		if v_0.Op != OpLOONG64MOVVconst {
  1985  			break
  1986  		}
  1987  		c := auxIntToInt64(v_0.AuxInt)
  1988  		if v_1.Op != OpLOONG64MOVVconst {
  1989  			break
  1990  		}
  1991  		d := auxIntToInt64(v_1.AuxInt)
  1992  		if !(d != 0) {
  1993  			break
  1994  		}
  1995  		v.reset(OpLOONG64MOVVconst)
  1996  		v.AuxInt = int64ToAuxInt(c / d)
  1997  		return true
  1998  	}
  1999  	return false
  2000  }
  2001  func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
  2002  	v_1 := v.Args[1]
  2003  	v_0 := v.Args[0]
  2004  	// match: (DIVVU x (MOVVconst [1]))
  2005  	// result: x
  2006  	for {
  2007  		x := v_0
  2008  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  2009  			break
  2010  		}
  2011  		v.copyOf(x)
  2012  		return true
  2013  	}
  2014  	// match: (DIVVU x (MOVVconst [c]))
  2015  	// cond: isPowerOfTwo(c)
  2016  	// result: (SRLVconst [log64(c)] x)
  2017  	for {
  2018  		x := v_0
  2019  		if v_1.Op != OpLOONG64MOVVconst {
  2020  			break
  2021  		}
  2022  		c := auxIntToInt64(v_1.AuxInt)
  2023  		if !(isPowerOfTwo(c)) {
  2024  			break
  2025  		}
  2026  		v.reset(OpLOONG64SRLVconst)
  2027  		v.AuxInt = int64ToAuxInt(log64(c))
  2028  		v.AddArg(x)
  2029  		return true
  2030  	}
  2031  	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
  2032  	// cond: d != 0
  2033  	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
  2034  	for {
  2035  		if v_0.Op != OpLOONG64MOVVconst {
  2036  			break
  2037  		}
  2038  		c := auxIntToInt64(v_0.AuxInt)
  2039  		if v_1.Op != OpLOONG64MOVVconst {
  2040  			break
  2041  		}
  2042  		d := auxIntToInt64(v_1.AuxInt)
  2043  		if !(d != 0) {
  2044  			break
  2045  		}
  2046  		v.reset(OpLOONG64MOVVconst)
  2047  		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
  2048  		return true
  2049  	}
  2050  	return false
  2051  }
  2052  func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
  2053  	v_1 := v.Args[1]
  2054  	v_0 := v.Args[0]
  2055  	// match: (MASKEQZ (MOVVconst [0]) cond)
  2056  	// result: (MOVVconst [0])
  2057  	for {
  2058  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  2059  			break
  2060  		}
  2061  		v.reset(OpLOONG64MOVVconst)
  2062  		v.AuxInt = int64ToAuxInt(0)
  2063  		return true
  2064  	}
  2065  	// match: (MASKEQZ x (MOVVconst [c]))
  2066  	// cond: c == 0
  2067  	// result: (MOVVconst [0])
  2068  	for {
  2069  		if v_1.Op != OpLOONG64MOVVconst {
  2070  			break
  2071  		}
  2072  		c := auxIntToInt64(v_1.AuxInt)
  2073  		if !(c == 0) {
  2074  			break
  2075  		}
  2076  		v.reset(OpLOONG64MOVVconst)
  2077  		v.AuxInt = int64ToAuxInt(0)
  2078  		return true
  2079  	}
  2080  	// match: (MASKEQZ x (MOVVconst [c]))
  2081  	// cond: c != 0
  2082  	// result: x
  2083  	for {
  2084  		x := v_0
  2085  		if v_1.Op != OpLOONG64MOVVconst {
  2086  			break
  2087  		}
  2088  		c := auxIntToInt64(v_1.AuxInt)
  2089  		if !(c != 0) {
  2090  			break
  2091  		}
  2092  		v.copyOf(x)
  2093  		return true
  2094  	}
  2095  	return false
  2096  }
  2097  func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
  2098  	v_0 := v.Args[0]
  2099  	// match: (MASKNEZ (MOVVconst [0]) cond)
  2100  	// result: (MOVVconst [0])
  2101  	for {
  2102  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  2103  			break
  2104  		}
  2105  		v.reset(OpLOONG64MOVVconst)
  2106  		v.AuxInt = int64ToAuxInt(0)
  2107  		return true
  2108  	}
  2109  	return false
  2110  }
  2111  func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
  2112  	v_1 := v.Args[1]
  2113  	v_0 := v.Args[0]
  2114  	b := v.Block
  2115  	config := b.Func.Config
  2116  	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2117  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2118  	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
  2119  	for {
  2120  		off1 := auxIntToInt32(v.AuxInt)
  2121  		sym := auxToSym(v.Aux)
  2122  		if v_0.Op != OpLOONG64ADDVconst {
  2123  			break
  2124  		}
  2125  		off2 := auxIntToInt64(v_0.AuxInt)
  2126  		ptr := v_0.Args[0]
  2127  		mem := v_1
  2128  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2129  			break
  2130  		}
  2131  		v.reset(OpLOONG64MOVBUload)
  2132  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2133  		v.Aux = symToAux(sym)
  2134  		v.AddArg2(ptr, mem)
  2135  		return true
  2136  	}
  2137  	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2138  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2139  	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2140  	for {
  2141  		off1 := auxIntToInt32(v.AuxInt)
  2142  		sym1 := auxToSym(v.Aux)
  2143  		if v_0.Op != OpLOONG64MOVVaddr {
  2144  			break
  2145  		}
  2146  		off2 := auxIntToInt32(v_0.AuxInt)
  2147  		sym2 := auxToSym(v_0.Aux)
  2148  		ptr := v_0.Args[0]
  2149  		mem := v_1
  2150  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2151  			break
  2152  		}
  2153  		v.reset(OpLOONG64MOVBUload)
  2154  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2155  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2156  		v.AddArg2(ptr, mem)
  2157  		return true
  2158  	}
  2159  	// match: (MOVBUload [off] {sym} (ADDV ptr idx) mem)
  2160  	// cond: off == 0 && sym == nil
  2161  	// result: (MOVBUloadidx ptr idx mem)
  2162  	for {
  2163  		off := auxIntToInt32(v.AuxInt)
  2164  		sym := auxToSym(v.Aux)
  2165  		if v_0.Op != OpLOONG64ADDV {
  2166  			break
  2167  		}
  2168  		idx := v_0.Args[1]
  2169  		ptr := v_0.Args[0]
  2170  		mem := v_1
  2171  		if !(off == 0 && sym == nil) {
  2172  			break
  2173  		}
  2174  		v.reset(OpLOONG64MOVBUloadidx)
  2175  		v.AddArg3(ptr, idx, mem)
  2176  		return true
  2177  	}
  2178  	return false
  2179  }
  2180  func rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v *Value) bool {
  2181  	v_2 := v.Args[2]
  2182  	v_1 := v.Args[1]
  2183  	v_0 := v.Args[0]
  2184  	// match: (MOVBUloadidx ptr (MOVVconst [c]) mem)
  2185  	// cond: is32Bit(c)
  2186  	// result: (MOVBUload [int32(c)] ptr mem)
  2187  	for {
  2188  		ptr := v_0
  2189  		if v_1.Op != OpLOONG64MOVVconst {
  2190  			break
  2191  		}
  2192  		c := auxIntToInt64(v_1.AuxInt)
  2193  		mem := v_2
  2194  		if !(is32Bit(c)) {
  2195  			break
  2196  		}
  2197  		v.reset(OpLOONG64MOVBUload)
  2198  		v.AuxInt = int32ToAuxInt(int32(c))
  2199  		v.AddArg2(ptr, mem)
  2200  		return true
  2201  	}
  2202  	// match: (MOVBUloadidx (MOVVconst [c]) ptr mem)
  2203  	// cond: is32Bit(c)
  2204  	// result: (MOVBUload [int32(c)] ptr mem)
  2205  	for {
  2206  		if v_0.Op != OpLOONG64MOVVconst {
  2207  			break
  2208  		}
  2209  		c := auxIntToInt64(v_0.AuxInt)
  2210  		ptr := v_1
  2211  		mem := v_2
  2212  		if !(is32Bit(c)) {
  2213  			break
  2214  		}
  2215  		v.reset(OpLOONG64MOVBUload)
  2216  		v.AuxInt = int32ToAuxInt(int32(c))
  2217  		v.AddArg2(ptr, mem)
  2218  		return true
  2219  	}
  2220  	return false
  2221  }
  2222  func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
  2223  	v_0 := v.Args[0]
  2224  	// match: (MOVBUreg (SRLVconst [rc] x))
  2225  	// cond: rc < 8
  2226  	// result: (BSTRPICKV [rc + (7+rc)<<6] x)
  2227  	for {
  2228  		if v_0.Op != OpLOONG64SRLVconst {
  2229  			break
  2230  		}
  2231  		rc := auxIntToInt64(v_0.AuxInt)
  2232  		x := v_0.Args[0]
  2233  		if !(rc < 8) {
  2234  			break
  2235  		}
  2236  		v.reset(OpLOONG64BSTRPICKV)
  2237  		v.AuxInt = int64ToAuxInt(rc + (7+rc)<<6)
  2238  		v.AddArg(x)
  2239  		return true
  2240  	}
  2241  	// match: (MOVBUreg x:(SGT _ _))
  2242  	// result: x
  2243  	for {
  2244  		x := v_0
  2245  		if x.Op != OpLOONG64SGT {
  2246  			break
  2247  		}
  2248  		v.copyOf(x)
  2249  		return true
  2250  	}
  2251  	// match: (MOVBUreg x:(SGTU _ _))
  2252  	// result: x
  2253  	for {
  2254  		x := v_0
  2255  		if x.Op != OpLOONG64SGTU {
  2256  			break
  2257  		}
  2258  		v.copyOf(x)
  2259  		return true
  2260  	}
  2261  	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGT _ _)))
  2262  	// result: x
  2263  	for {
  2264  		x := v_0
  2265  		if x.Op != OpLOONG64XOR {
  2266  			break
  2267  		}
  2268  		_ = x.Args[1]
  2269  		x_0 := x.Args[0]
  2270  		x_1 := x.Args[1]
  2271  		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
  2272  			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGT {
  2273  				continue
  2274  			}
  2275  			v.copyOf(x)
  2276  			return true
  2277  		}
  2278  		break
  2279  	}
  2280  	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGTU _ _)))
  2281  	// result: x
  2282  	for {
  2283  		x := v_0
  2284  		if x.Op != OpLOONG64XOR {
  2285  			break
  2286  		}
  2287  		_ = x.Args[1]
  2288  		x_0 := x.Args[0]
  2289  		x_1 := x.Args[1]
  2290  		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
  2291  			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGTU {
  2292  				continue
  2293  			}
  2294  			v.copyOf(x)
  2295  			return true
  2296  		}
  2297  		break
  2298  	}
  2299  	// match: (MOVBUreg x:(MOVBUload _ _))
  2300  	// result: (MOVVreg x)
  2301  	for {
  2302  		x := v_0
  2303  		if x.Op != OpLOONG64MOVBUload {
  2304  			break
  2305  		}
  2306  		v.reset(OpLOONG64MOVVreg)
  2307  		v.AddArg(x)
  2308  		return true
  2309  	}
  2310  	// match: (MOVBUreg x:(MOVBUreg _))
  2311  	// result: (MOVVreg x)
  2312  	for {
  2313  		x := v_0
  2314  		if x.Op != OpLOONG64MOVBUreg {
  2315  			break
  2316  		}
  2317  		v.reset(OpLOONG64MOVVreg)
  2318  		v.AddArg(x)
  2319  		return true
  2320  	}
  2321  	// match: (MOVBUreg (SLLVconst [lc] x))
  2322  	// cond: lc >= 8
  2323  	// result: (MOVVconst [0])
  2324  	for {
  2325  		if v_0.Op != OpLOONG64SLLVconst {
  2326  			break
  2327  		}
  2328  		lc := auxIntToInt64(v_0.AuxInt)
  2329  		if !(lc >= 8) {
  2330  			break
  2331  		}
  2332  		v.reset(OpLOONG64MOVVconst)
  2333  		v.AuxInt = int64ToAuxInt(0)
  2334  		return true
  2335  	}
  2336  	// match: (MOVBUreg (MOVVconst [c]))
  2337  	// result: (MOVVconst [int64(uint8(c))])
  2338  	for {
  2339  		if v_0.Op != OpLOONG64MOVVconst {
  2340  			break
  2341  		}
  2342  		c := auxIntToInt64(v_0.AuxInt)
  2343  		v.reset(OpLOONG64MOVVconst)
  2344  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  2345  		return true
  2346  	}
  2347  	// match: (MOVBUreg (ANDconst [c] x))
  2348  	// result: (ANDconst [c&0xff] x)
  2349  	for {
  2350  		if v_0.Op != OpLOONG64ANDconst {
  2351  			break
  2352  		}
  2353  		c := auxIntToInt64(v_0.AuxInt)
  2354  		x := v_0.Args[0]
  2355  		v.reset(OpLOONG64ANDconst)
  2356  		v.AuxInt = int64ToAuxInt(c & 0xff)
  2357  		v.AddArg(x)
  2358  		return true
  2359  	}
  2360  	return false
  2361  }
  2362  func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
  2363  	v_1 := v.Args[1]
  2364  	v_0 := v.Args[0]
  2365  	b := v.Block
  2366  	config := b.Func.Config
  2367  	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2368  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2369  	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
  2370  	for {
  2371  		off1 := auxIntToInt32(v.AuxInt)
  2372  		sym := auxToSym(v.Aux)
  2373  		if v_0.Op != OpLOONG64ADDVconst {
  2374  			break
  2375  		}
  2376  		off2 := auxIntToInt64(v_0.AuxInt)
  2377  		ptr := v_0.Args[0]
  2378  		mem := v_1
  2379  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2380  			break
  2381  		}
  2382  		v.reset(OpLOONG64MOVBload)
  2383  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2384  		v.Aux = symToAux(sym)
  2385  		v.AddArg2(ptr, mem)
  2386  		return true
  2387  	}
  2388  	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2389  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2390  	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2391  	for {
  2392  		off1 := auxIntToInt32(v.AuxInt)
  2393  		sym1 := auxToSym(v.Aux)
  2394  		if v_0.Op != OpLOONG64MOVVaddr {
  2395  			break
  2396  		}
  2397  		off2 := auxIntToInt32(v_0.AuxInt)
  2398  		sym2 := auxToSym(v_0.Aux)
  2399  		ptr := v_0.Args[0]
  2400  		mem := v_1
  2401  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2402  			break
  2403  		}
  2404  		v.reset(OpLOONG64MOVBload)
  2405  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2406  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2407  		v.AddArg2(ptr, mem)
  2408  		return true
  2409  	}
  2410  	// match: (MOVBload [off] {sym} (ADDV ptr idx) mem)
  2411  	// cond: off == 0 && sym == nil
  2412  	// result: (MOVBloadidx ptr idx mem)
  2413  	for {
  2414  		off := auxIntToInt32(v.AuxInt)
  2415  		sym := auxToSym(v.Aux)
  2416  		if v_0.Op != OpLOONG64ADDV {
  2417  			break
  2418  		}
  2419  		idx := v_0.Args[1]
  2420  		ptr := v_0.Args[0]
  2421  		mem := v_1
  2422  		if !(off == 0 && sym == nil) {
  2423  			break
  2424  		}
  2425  		v.reset(OpLOONG64MOVBloadidx)
  2426  		v.AddArg3(ptr, idx, mem)
  2427  		return true
  2428  	}
  2429  	return false
  2430  }
  2431  func rewriteValueLOONG64_OpLOONG64MOVBloadidx(v *Value) bool {
  2432  	v_2 := v.Args[2]
  2433  	v_1 := v.Args[1]
  2434  	v_0 := v.Args[0]
  2435  	// match: (MOVBloadidx ptr (MOVVconst [c]) mem)
  2436  	// cond: is32Bit(c)
  2437  	// result: (MOVBload [int32(c)] ptr mem)
  2438  	for {
  2439  		ptr := v_0
  2440  		if v_1.Op != OpLOONG64MOVVconst {
  2441  			break
  2442  		}
  2443  		c := auxIntToInt64(v_1.AuxInt)
  2444  		mem := v_2
  2445  		if !(is32Bit(c)) {
  2446  			break
  2447  		}
  2448  		v.reset(OpLOONG64MOVBload)
  2449  		v.AuxInt = int32ToAuxInt(int32(c))
  2450  		v.AddArg2(ptr, mem)
  2451  		return true
  2452  	}
  2453  	// match: (MOVBloadidx (MOVVconst [c]) ptr mem)
  2454  	// cond: is32Bit(c)
  2455  	// result: (MOVBload [int32(c)] ptr mem)
  2456  	for {
  2457  		if v_0.Op != OpLOONG64MOVVconst {
  2458  			break
  2459  		}
  2460  		c := auxIntToInt64(v_0.AuxInt)
  2461  		ptr := v_1
  2462  		mem := v_2
  2463  		if !(is32Bit(c)) {
  2464  			break
  2465  		}
  2466  		v.reset(OpLOONG64MOVBload)
  2467  		v.AuxInt = int32ToAuxInt(int32(c))
  2468  		v.AddArg2(ptr, mem)
  2469  		return true
  2470  	}
  2471  	return false
  2472  }
  2473  func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
  2474  	v_0 := v.Args[0]
  2475  	// match: (MOVBreg x:(MOVBload _ _))
  2476  	// result: (MOVVreg x)
  2477  	for {
  2478  		x := v_0
  2479  		if x.Op != OpLOONG64MOVBload {
  2480  			break
  2481  		}
  2482  		v.reset(OpLOONG64MOVVreg)
  2483  		v.AddArg(x)
  2484  		return true
  2485  	}
  2486  	// match: (MOVBreg x:(MOVBreg _))
  2487  	// result: (MOVVreg x)
  2488  	for {
  2489  		x := v_0
  2490  		if x.Op != OpLOONG64MOVBreg {
  2491  			break
  2492  		}
  2493  		v.reset(OpLOONG64MOVVreg)
  2494  		v.AddArg(x)
  2495  		return true
  2496  	}
  2497  	// match: (MOVBreg (MOVVconst [c]))
  2498  	// result: (MOVVconst [int64(int8(c))])
  2499  	for {
  2500  		if v_0.Op != OpLOONG64MOVVconst {
  2501  			break
  2502  		}
  2503  		c := auxIntToInt64(v_0.AuxInt)
  2504  		v.reset(OpLOONG64MOVVconst)
  2505  		v.AuxInt = int64ToAuxInt(int64(int8(c)))
  2506  		return true
  2507  	}
  2508  	return false
  2509  }
  2510  func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
  2511  	v_2 := v.Args[2]
  2512  	v_1 := v.Args[1]
  2513  	v_0 := v.Args[0]
  2514  	b := v.Block
  2515  	config := b.Func.Config
  2516  	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  2517  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2518  	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
  2519  	for {
  2520  		off1 := auxIntToInt32(v.AuxInt)
  2521  		sym := auxToSym(v.Aux)
  2522  		if v_0.Op != OpLOONG64ADDVconst {
  2523  			break
  2524  		}
  2525  		off2 := auxIntToInt64(v_0.AuxInt)
  2526  		ptr := v_0.Args[0]
  2527  		val := v_1
  2528  		mem := v_2
  2529  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2530  			break
  2531  		}
  2532  		v.reset(OpLOONG64MOVBstore)
  2533  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2534  		v.Aux = symToAux(sym)
  2535  		v.AddArg3(ptr, val, mem)
  2536  		return true
  2537  	}
  2538  	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  2539  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2540  	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  2541  	for {
  2542  		off1 := auxIntToInt32(v.AuxInt)
  2543  		sym1 := auxToSym(v.Aux)
  2544  		if v_0.Op != OpLOONG64MOVVaddr {
  2545  			break
  2546  		}
  2547  		off2 := auxIntToInt32(v_0.AuxInt)
  2548  		sym2 := auxToSym(v_0.Aux)
  2549  		ptr := v_0.Args[0]
  2550  		val := v_1
  2551  		mem := v_2
  2552  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2553  			break
  2554  		}
  2555  		v.reset(OpLOONG64MOVBstore)
  2556  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2557  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2558  		v.AddArg3(ptr, val, mem)
  2559  		return true
  2560  	}
  2561  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  2562  	// result: (MOVBstore [off] {sym} ptr x mem)
  2563  	for {
  2564  		off := auxIntToInt32(v.AuxInt)
  2565  		sym := auxToSym(v.Aux)
  2566  		ptr := v_0
  2567  		if v_1.Op != OpLOONG64MOVBreg {
  2568  			break
  2569  		}
  2570  		x := v_1.Args[0]
  2571  		mem := v_2
  2572  		v.reset(OpLOONG64MOVBstore)
  2573  		v.AuxInt = int32ToAuxInt(off)
  2574  		v.Aux = symToAux(sym)
  2575  		v.AddArg3(ptr, x, mem)
  2576  		return true
  2577  	}
  2578  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  2579  	// result: (MOVBstore [off] {sym} ptr x mem)
  2580  	for {
  2581  		off := auxIntToInt32(v.AuxInt)
  2582  		sym := auxToSym(v.Aux)
  2583  		ptr := v_0
  2584  		if v_1.Op != OpLOONG64MOVBUreg {
  2585  			break
  2586  		}
  2587  		x := v_1.Args[0]
  2588  		mem := v_2
  2589  		v.reset(OpLOONG64MOVBstore)
  2590  		v.AuxInt = int32ToAuxInt(off)
  2591  		v.Aux = symToAux(sym)
  2592  		v.AddArg3(ptr, x, mem)
  2593  		return true
  2594  	}
  2595  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  2596  	// result: (MOVBstore [off] {sym} ptr x mem)
  2597  	for {
  2598  		off := auxIntToInt32(v.AuxInt)
  2599  		sym := auxToSym(v.Aux)
  2600  		ptr := v_0
  2601  		if v_1.Op != OpLOONG64MOVHreg {
  2602  			break
  2603  		}
  2604  		x := v_1.Args[0]
  2605  		mem := v_2
  2606  		v.reset(OpLOONG64MOVBstore)
  2607  		v.AuxInt = int32ToAuxInt(off)
  2608  		v.Aux = symToAux(sym)
  2609  		v.AddArg3(ptr, x, mem)
  2610  		return true
  2611  	}
  2612  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  2613  	// result: (MOVBstore [off] {sym} ptr x mem)
  2614  	for {
  2615  		off := auxIntToInt32(v.AuxInt)
  2616  		sym := auxToSym(v.Aux)
  2617  		ptr := v_0
  2618  		if v_1.Op != OpLOONG64MOVHUreg {
  2619  			break
  2620  		}
  2621  		x := v_1.Args[0]
  2622  		mem := v_2
  2623  		v.reset(OpLOONG64MOVBstore)
  2624  		v.AuxInt = int32ToAuxInt(off)
  2625  		v.Aux = symToAux(sym)
  2626  		v.AddArg3(ptr, x, mem)
  2627  		return true
  2628  	}
  2629  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  2630  	// result: (MOVBstore [off] {sym} ptr x mem)
  2631  	for {
  2632  		off := auxIntToInt32(v.AuxInt)
  2633  		sym := auxToSym(v.Aux)
  2634  		ptr := v_0
  2635  		if v_1.Op != OpLOONG64MOVWreg {
  2636  			break
  2637  		}
  2638  		x := v_1.Args[0]
  2639  		mem := v_2
  2640  		v.reset(OpLOONG64MOVBstore)
  2641  		v.AuxInt = int32ToAuxInt(off)
  2642  		v.Aux = symToAux(sym)
  2643  		v.AddArg3(ptr, x, mem)
  2644  		return true
  2645  	}
  2646  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  2647  	// result: (MOVBstore [off] {sym} ptr x mem)
  2648  	for {
  2649  		off := auxIntToInt32(v.AuxInt)
  2650  		sym := auxToSym(v.Aux)
  2651  		ptr := v_0
  2652  		if v_1.Op != OpLOONG64MOVWUreg {
  2653  			break
  2654  		}
  2655  		x := v_1.Args[0]
  2656  		mem := v_2
  2657  		v.reset(OpLOONG64MOVBstore)
  2658  		v.AuxInt = int32ToAuxInt(off)
  2659  		v.Aux = symToAux(sym)
  2660  		v.AddArg3(ptr, x, mem)
  2661  		return true
  2662  	}
  2663  	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
  2664  	// result: (MOVBstorezero [off] {sym} ptr mem)
  2665  	for {
  2666  		off := auxIntToInt32(v.AuxInt)
  2667  		sym := auxToSym(v.Aux)
  2668  		ptr := v_0
  2669  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  2670  			break
  2671  		}
  2672  		mem := v_2
  2673  		v.reset(OpLOONG64MOVBstorezero)
  2674  		v.AuxInt = int32ToAuxInt(off)
  2675  		v.Aux = symToAux(sym)
  2676  		v.AddArg2(ptr, mem)
  2677  		return true
  2678  	}
  2679  	// match: (MOVBstore [off] {sym} (ADDV ptr idx) val mem)
  2680  	// cond: off == 0 && sym == nil
  2681  	// result: (MOVBstoreidx ptr idx val mem)
  2682  	for {
  2683  		off := auxIntToInt32(v.AuxInt)
  2684  		sym := auxToSym(v.Aux)
  2685  		if v_0.Op != OpLOONG64ADDV {
  2686  			break
  2687  		}
  2688  		idx := v_0.Args[1]
  2689  		ptr := v_0.Args[0]
  2690  		val := v_1
  2691  		mem := v_2
  2692  		if !(off == 0 && sym == nil) {
  2693  			break
  2694  		}
  2695  		v.reset(OpLOONG64MOVBstoreidx)
  2696  		v.AddArg4(ptr, idx, val, mem)
  2697  		return true
  2698  	}
  2699  	return false
  2700  }
  2701  func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
  2702  	v_3 := v.Args[3]
  2703  	v_2 := v.Args[2]
  2704  	v_1 := v.Args[1]
  2705  	v_0 := v.Args[0]
  2706  	// match: (MOVBstoreidx ptr (MOVVconst [c]) val mem)
  2707  	// cond: is32Bit(c)
  2708  	// result: (MOVBstore [int32(c)] ptr val mem)
  2709  	for {
  2710  		ptr := v_0
  2711  		if v_1.Op != OpLOONG64MOVVconst {
  2712  			break
  2713  		}
  2714  		c := auxIntToInt64(v_1.AuxInt)
  2715  		val := v_2
  2716  		mem := v_3
  2717  		if !(is32Bit(c)) {
  2718  			break
  2719  		}
  2720  		v.reset(OpLOONG64MOVBstore)
  2721  		v.AuxInt = int32ToAuxInt(int32(c))
  2722  		v.AddArg3(ptr, val, mem)
  2723  		return true
  2724  	}
  2725  	// match: (MOVBstoreidx (MOVVconst [c]) idx val mem)
  2726  	// cond: is32Bit(c)
  2727  	// result: (MOVBstore [int32(c)] idx val mem)
  2728  	for {
  2729  		if v_0.Op != OpLOONG64MOVVconst {
  2730  			break
  2731  		}
  2732  		c := auxIntToInt64(v_0.AuxInt)
  2733  		idx := v_1
  2734  		val := v_2
  2735  		mem := v_3
  2736  		if !(is32Bit(c)) {
  2737  			break
  2738  		}
  2739  		v.reset(OpLOONG64MOVBstore)
  2740  		v.AuxInt = int32ToAuxInt(int32(c))
  2741  		v.AddArg3(idx, val, mem)
  2742  		return true
  2743  	}
  2744  	// match: (MOVBstoreidx ptr idx (MOVVconst [0]) mem)
  2745  	// result: (MOVBstorezeroidx ptr idx mem)
  2746  	for {
  2747  		ptr := v_0
  2748  		idx := v_1
  2749  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  2750  			break
  2751  		}
  2752  		mem := v_3
  2753  		v.reset(OpLOONG64MOVBstorezeroidx)
  2754  		v.AddArg3(ptr, idx, mem)
  2755  		return true
  2756  	}
  2757  	return false
  2758  }
  2759  func rewriteValueLOONG64_OpLOONG64MOVBstorezero(v *Value) bool {
  2760  	v_1 := v.Args[1]
  2761  	v_0 := v.Args[0]
  2762  	b := v.Block
  2763  	config := b.Func.Config
  2764  	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  2765  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2766  	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
  2767  	for {
  2768  		off1 := auxIntToInt32(v.AuxInt)
  2769  		sym := auxToSym(v.Aux)
  2770  		if v_0.Op != OpLOONG64ADDVconst {
  2771  			break
  2772  		}
  2773  		off2 := auxIntToInt64(v_0.AuxInt)
  2774  		ptr := v_0.Args[0]
  2775  		mem := v_1
  2776  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2777  			break
  2778  		}
  2779  		v.reset(OpLOONG64MOVBstorezero)
  2780  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2781  		v.Aux = symToAux(sym)
  2782  		v.AddArg2(ptr, mem)
  2783  		return true
  2784  	}
  2785  	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2786  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2787  	// result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2788  	for {
  2789  		off1 := auxIntToInt32(v.AuxInt)
  2790  		sym1 := auxToSym(v.Aux)
  2791  		if v_0.Op != OpLOONG64MOVVaddr {
  2792  			break
  2793  		}
  2794  		off2 := auxIntToInt32(v_0.AuxInt)
  2795  		sym2 := auxToSym(v_0.Aux)
  2796  		ptr := v_0.Args[0]
  2797  		mem := v_1
  2798  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2799  			break
  2800  		}
  2801  		v.reset(OpLOONG64MOVBstorezero)
  2802  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2803  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2804  		v.AddArg2(ptr, mem)
  2805  		return true
  2806  	}
  2807  	// match: (MOVBstorezero [off] {sym} (ADDV ptr idx) mem)
  2808  	// cond: off == 0 && sym == nil
  2809  	// result: (MOVBstorezeroidx ptr idx mem)
  2810  	for {
  2811  		off := auxIntToInt32(v.AuxInt)
  2812  		sym := auxToSym(v.Aux)
  2813  		if v_0.Op != OpLOONG64ADDV {
  2814  			break
  2815  		}
  2816  		idx := v_0.Args[1]
  2817  		ptr := v_0.Args[0]
  2818  		mem := v_1
  2819  		if !(off == 0 && sym == nil) {
  2820  			break
  2821  		}
  2822  		v.reset(OpLOONG64MOVBstorezeroidx)
  2823  		v.AddArg3(ptr, idx, mem)
  2824  		return true
  2825  	}
  2826  	return false
  2827  }
  2828  func rewriteValueLOONG64_OpLOONG64MOVBstorezeroidx(v *Value) bool {
  2829  	v_2 := v.Args[2]
  2830  	v_1 := v.Args[1]
  2831  	v_0 := v.Args[0]
  2832  	// match: (MOVBstorezeroidx ptr (MOVVconst [c]) mem)
  2833  	// cond: is32Bit(c)
  2834  	// result: (MOVBstorezero [int32(c)] ptr mem)
  2835  	for {
  2836  		ptr := v_0
  2837  		if v_1.Op != OpLOONG64MOVVconst {
  2838  			break
  2839  		}
  2840  		c := auxIntToInt64(v_1.AuxInt)
  2841  		mem := v_2
  2842  		if !(is32Bit(c)) {
  2843  			break
  2844  		}
  2845  		v.reset(OpLOONG64MOVBstorezero)
  2846  		v.AuxInt = int32ToAuxInt(int32(c))
  2847  		v.AddArg2(ptr, mem)
  2848  		return true
  2849  	}
  2850  	// match: (MOVBstorezeroidx (MOVVconst [c]) idx mem)
  2851  	// cond: is32Bit(c)
  2852  	// result: (MOVBstorezero [int32(c)] idx mem)
  2853  	for {
  2854  		if v_0.Op != OpLOONG64MOVVconst {
  2855  			break
  2856  		}
  2857  		c := auxIntToInt64(v_0.AuxInt)
  2858  		idx := v_1
  2859  		mem := v_2
  2860  		if !(is32Bit(c)) {
  2861  			break
  2862  		}
  2863  		v.reset(OpLOONG64MOVBstorezero)
  2864  		v.AuxInt = int32ToAuxInt(int32(c))
  2865  		v.AddArg2(idx, mem)
  2866  		return true
  2867  	}
  2868  	return false
  2869  }
  2870  func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
  2871  	v_1 := v.Args[1]
  2872  	v_0 := v.Args[0]
  2873  	b := v.Block
  2874  	config := b.Func.Config
  2875  	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
  2876  	// result: (MOVVgpfp val)
  2877  	for {
  2878  		off := auxIntToInt32(v.AuxInt)
  2879  		sym := auxToSym(v.Aux)
  2880  		ptr := v_0
  2881  		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  2882  			break
  2883  		}
  2884  		val := v_1.Args[1]
  2885  		if ptr != v_1.Args[0] {
  2886  			break
  2887  		}
  2888  		v.reset(OpLOONG64MOVVgpfp)
  2889  		v.AddArg(val)
  2890  		return true
  2891  	}
  2892  	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
  2893  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2894  	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
  2895  	for {
  2896  		off1 := auxIntToInt32(v.AuxInt)
  2897  		sym := auxToSym(v.Aux)
  2898  		if v_0.Op != OpLOONG64ADDVconst {
  2899  			break
  2900  		}
  2901  		off2 := auxIntToInt64(v_0.AuxInt)
  2902  		ptr := v_0.Args[0]
  2903  		mem := v_1
  2904  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2905  			break
  2906  		}
  2907  		v.reset(OpLOONG64MOVDload)
  2908  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2909  		v.Aux = symToAux(sym)
  2910  		v.AddArg2(ptr, mem)
  2911  		return true
  2912  	}
  2913  	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  2914  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  2915  	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  2916  	for {
  2917  		off1 := auxIntToInt32(v.AuxInt)
  2918  		sym1 := auxToSym(v.Aux)
  2919  		if v_0.Op != OpLOONG64MOVVaddr {
  2920  			break
  2921  		}
  2922  		off2 := auxIntToInt32(v_0.AuxInt)
  2923  		sym2 := auxToSym(v_0.Aux)
  2924  		ptr := v_0.Args[0]
  2925  		mem := v_1
  2926  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  2927  			break
  2928  		}
  2929  		v.reset(OpLOONG64MOVDload)
  2930  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  2931  		v.Aux = symToAux(mergeSym(sym1, sym2))
  2932  		v.AddArg2(ptr, mem)
  2933  		return true
  2934  	}
  2935  	// match: (MOVDload [off] {sym} (ADDV ptr idx) mem)
  2936  	// cond: off == 0 && sym == nil
  2937  	// result: (MOVDloadidx ptr idx mem)
  2938  	for {
  2939  		off := auxIntToInt32(v.AuxInt)
  2940  		sym := auxToSym(v.Aux)
  2941  		if v_0.Op != OpLOONG64ADDV {
  2942  			break
  2943  		}
  2944  		idx := v_0.Args[1]
  2945  		ptr := v_0.Args[0]
  2946  		mem := v_1
  2947  		if !(off == 0 && sym == nil) {
  2948  			break
  2949  		}
  2950  		v.reset(OpLOONG64MOVDloadidx)
  2951  		v.AddArg3(ptr, idx, mem)
  2952  		return true
  2953  	}
  2954  	return false
  2955  }
  2956  func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
  2957  	v_2 := v.Args[2]
  2958  	v_1 := v.Args[1]
  2959  	v_0 := v.Args[0]
  2960  	// match: (MOVDloadidx ptr (MOVVconst [c]) mem)
  2961  	// cond: is32Bit(c)
  2962  	// result: (MOVDload [int32(c)] ptr mem)
  2963  	for {
  2964  		ptr := v_0
  2965  		if v_1.Op != OpLOONG64MOVVconst {
  2966  			break
  2967  		}
  2968  		c := auxIntToInt64(v_1.AuxInt)
  2969  		mem := v_2
  2970  		if !(is32Bit(c)) {
  2971  			break
  2972  		}
  2973  		v.reset(OpLOONG64MOVDload)
  2974  		v.AuxInt = int32ToAuxInt(int32(c))
  2975  		v.AddArg2(ptr, mem)
  2976  		return true
  2977  	}
  2978  	// match: (MOVDloadidx (MOVVconst [c]) ptr mem)
  2979  	// cond: is32Bit(c)
  2980  	// result: (MOVDload [int32(c)] ptr mem)
  2981  	for {
  2982  		if v_0.Op != OpLOONG64MOVVconst {
  2983  			break
  2984  		}
  2985  		c := auxIntToInt64(v_0.AuxInt)
  2986  		ptr := v_1
  2987  		mem := v_2
  2988  		if !(is32Bit(c)) {
  2989  			break
  2990  		}
  2991  		v.reset(OpLOONG64MOVDload)
  2992  		v.AuxInt = int32ToAuxInt(int32(c))
  2993  		v.AddArg2(ptr, mem)
  2994  		return true
  2995  	}
  2996  	return false
  2997  }
  2998  func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
  2999  	v_2 := v.Args[2]
  3000  	v_1 := v.Args[1]
  3001  	v_0 := v.Args[0]
  3002  	b := v.Block
  3003  	config := b.Func.Config
  3004  	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
  3005  	// result: (MOVVstore [off] {sym} ptr val mem)
  3006  	for {
  3007  		off := auxIntToInt32(v.AuxInt)
  3008  		sym := auxToSym(v.Aux)
  3009  		ptr := v_0
  3010  		if v_1.Op != OpLOONG64MOVVgpfp {
  3011  			break
  3012  		}
  3013  		val := v_1.Args[0]
  3014  		mem := v_2
  3015  		v.reset(OpLOONG64MOVVstore)
  3016  		v.AuxInt = int32ToAuxInt(off)
  3017  		v.Aux = symToAux(sym)
  3018  		v.AddArg3(ptr, val, mem)
  3019  		return true
  3020  	}
  3021  	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3022  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3023  	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
  3024  	for {
  3025  		off1 := auxIntToInt32(v.AuxInt)
  3026  		sym := auxToSym(v.Aux)
  3027  		if v_0.Op != OpLOONG64ADDVconst {
  3028  			break
  3029  		}
  3030  		off2 := auxIntToInt64(v_0.AuxInt)
  3031  		ptr := v_0.Args[0]
  3032  		val := v_1
  3033  		mem := v_2
  3034  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3035  			break
  3036  		}
  3037  		v.reset(OpLOONG64MOVDstore)
  3038  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3039  		v.Aux = symToAux(sym)
  3040  		v.AddArg3(ptr, val, mem)
  3041  		return true
  3042  	}
  3043  	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3044  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3045  	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3046  	for {
  3047  		off1 := auxIntToInt32(v.AuxInt)
  3048  		sym1 := auxToSym(v.Aux)
  3049  		if v_0.Op != OpLOONG64MOVVaddr {
  3050  			break
  3051  		}
  3052  		off2 := auxIntToInt32(v_0.AuxInt)
  3053  		sym2 := auxToSym(v_0.Aux)
  3054  		ptr := v_0.Args[0]
  3055  		val := v_1
  3056  		mem := v_2
  3057  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3058  			break
  3059  		}
  3060  		v.reset(OpLOONG64MOVDstore)
  3061  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3062  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3063  		v.AddArg3(ptr, val, mem)
  3064  		return true
  3065  	}
  3066  	// match: (MOVDstore [off] {sym} (ADDV ptr idx) val mem)
  3067  	// cond: off == 0 && sym == nil
  3068  	// result: (MOVDstoreidx ptr idx val mem)
  3069  	for {
  3070  		off := auxIntToInt32(v.AuxInt)
  3071  		sym := auxToSym(v.Aux)
  3072  		if v_0.Op != OpLOONG64ADDV {
  3073  			break
  3074  		}
  3075  		idx := v_0.Args[1]
  3076  		ptr := v_0.Args[0]
  3077  		val := v_1
  3078  		mem := v_2
  3079  		if !(off == 0 && sym == nil) {
  3080  			break
  3081  		}
  3082  		v.reset(OpLOONG64MOVDstoreidx)
  3083  		v.AddArg4(ptr, idx, val, mem)
  3084  		return true
  3085  	}
  3086  	return false
  3087  }
  3088  func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
  3089  	v_3 := v.Args[3]
  3090  	v_2 := v.Args[2]
  3091  	v_1 := v.Args[1]
  3092  	v_0 := v.Args[0]
  3093  	// match: (MOVDstoreidx ptr (MOVVconst [c]) val mem)
  3094  	// cond: is32Bit(c)
  3095  	// result: (MOVDstore [int32(c)] ptr val mem)
  3096  	for {
  3097  		ptr := v_0
  3098  		if v_1.Op != OpLOONG64MOVVconst {
  3099  			break
  3100  		}
  3101  		c := auxIntToInt64(v_1.AuxInt)
  3102  		val := v_2
  3103  		mem := v_3
  3104  		if !(is32Bit(c)) {
  3105  			break
  3106  		}
  3107  		v.reset(OpLOONG64MOVDstore)
  3108  		v.AuxInt = int32ToAuxInt(int32(c))
  3109  		v.AddArg3(ptr, val, mem)
  3110  		return true
  3111  	}
  3112  	// match: (MOVDstoreidx (MOVVconst [c]) idx val mem)
  3113  	// cond: is32Bit(c)
  3114  	// result: (MOVDstore [int32(c)] idx val mem)
  3115  	for {
  3116  		if v_0.Op != OpLOONG64MOVVconst {
  3117  			break
  3118  		}
  3119  		c := auxIntToInt64(v_0.AuxInt)
  3120  		idx := v_1
  3121  		val := v_2
  3122  		mem := v_3
  3123  		if !(is32Bit(c)) {
  3124  			break
  3125  		}
  3126  		v.reset(OpLOONG64MOVDstore)
  3127  		v.AuxInt = int32ToAuxInt(int32(c))
  3128  		v.AddArg3(idx, val, mem)
  3129  		return true
  3130  	}
  3131  	return false
  3132  }
  3133  func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
  3134  	v_1 := v.Args[1]
  3135  	v_0 := v.Args[0]
  3136  	b := v.Block
  3137  	config := b.Func.Config
  3138  	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
  3139  	// result: (MOVWgpfp val)
  3140  	for {
  3141  		off := auxIntToInt32(v.AuxInt)
  3142  		sym := auxToSym(v.Aux)
  3143  		ptr := v_0
  3144  		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  3145  			break
  3146  		}
  3147  		val := v_1.Args[1]
  3148  		if ptr != v_1.Args[0] {
  3149  			break
  3150  		}
  3151  		v.reset(OpLOONG64MOVWgpfp)
  3152  		v.AddArg(val)
  3153  		return true
  3154  	}
  3155  	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3156  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3157  	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
  3158  	for {
  3159  		off1 := auxIntToInt32(v.AuxInt)
  3160  		sym := auxToSym(v.Aux)
  3161  		if v_0.Op != OpLOONG64ADDVconst {
  3162  			break
  3163  		}
  3164  		off2 := auxIntToInt64(v_0.AuxInt)
  3165  		ptr := v_0.Args[0]
  3166  		mem := v_1
  3167  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3168  			break
  3169  		}
  3170  		v.reset(OpLOONG64MOVFload)
  3171  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3172  		v.Aux = symToAux(sym)
  3173  		v.AddArg2(ptr, mem)
  3174  		return true
  3175  	}
  3176  	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3177  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3178  	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3179  	for {
  3180  		off1 := auxIntToInt32(v.AuxInt)
  3181  		sym1 := auxToSym(v.Aux)
  3182  		if v_0.Op != OpLOONG64MOVVaddr {
  3183  			break
  3184  		}
  3185  		off2 := auxIntToInt32(v_0.AuxInt)
  3186  		sym2 := auxToSym(v_0.Aux)
  3187  		ptr := v_0.Args[0]
  3188  		mem := v_1
  3189  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3190  			break
  3191  		}
  3192  		v.reset(OpLOONG64MOVFload)
  3193  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3194  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3195  		v.AddArg2(ptr, mem)
  3196  		return true
  3197  	}
  3198  	// match: (MOVFload [off] {sym} (ADDV ptr idx) mem)
  3199  	// cond: off == 0 && sym == nil
  3200  	// result: (MOVFloadidx ptr idx mem)
  3201  	for {
  3202  		off := auxIntToInt32(v.AuxInt)
  3203  		sym := auxToSym(v.Aux)
  3204  		if v_0.Op != OpLOONG64ADDV {
  3205  			break
  3206  		}
  3207  		idx := v_0.Args[1]
  3208  		ptr := v_0.Args[0]
  3209  		mem := v_1
  3210  		if !(off == 0 && sym == nil) {
  3211  			break
  3212  		}
  3213  		v.reset(OpLOONG64MOVFloadidx)
  3214  		v.AddArg3(ptr, idx, mem)
  3215  		return true
  3216  	}
  3217  	return false
  3218  }
  3219  func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
  3220  	v_2 := v.Args[2]
  3221  	v_1 := v.Args[1]
  3222  	v_0 := v.Args[0]
  3223  	// match: (MOVFloadidx ptr (MOVVconst [c]) mem)
  3224  	// cond: is32Bit(c)
  3225  	// result: (MOVFload [int32(c)] ptr mem)
  3226  	for {
  3227  		ptr := v_0
  3228  		if v_1.Op != OpLOONG64MOVVconst {
  3229  			break
  3230  		}
  3231  		c := auxIntToInt64(v_1.AuxInt)
  3232  		mem := v_2
  3233  		if !(is32Bit(c)) {
  3234  			break
  3235  		}
  3236  		v.reset(OpLOONG64MOVFload)
  3237  		v.AuxInt = int32ToAuxInt(int32(c))
  3238  		v.AddArg2(ptr, mem)
  3239  		return true
  3240  	}
  3241  	// match: (MOVFloadidx (MOVVconst [c]) ptr mem)
  3242  	// cond: is32Bit(c)
  3243  	// result: (MOVFload [int32(c)] ptr mem)
  3244  	for {
  3245  		if v_0.Op != OpLOONG64MOVVconst {
  3246  			break
  3247  		}
  3248  		c := auxIntToInt64(v_0.AuxInt)
  3249  		ptr := v_1
  3250  		mem := v_2
  3251  		if !(is32Bit(c)) {
  3252  			break
  3253  		}
  3254  		v.reset(OpLOONG64MOVFload)
  3255  		v.AuxInt = int32ToAuxInt(int32(c))
  3256  		v.AddArg2(ptr, mem)
  3257  		return true
  3258  	}
  3259  	return false
  3260  }
  3261  func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
  3262  	v_2 := v.Args[2]
  3263  	v_1 := v.Args[1]
  3264  	v_0 := v.Args[0]
  3265  	b := v.Block
  3266  	config := b.Func.Config
  3267  	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
  3268  	// result: (MOVWstore [off] {sym} ptr val mem)
  3269  	for {
  3270  		off := auxIntToInt32(v.AuxInt)
  3271  		sym := auxToSym(v.Aux)
  3272  		ptr := v_0
  3273  		if v_1.Op != OpLOONG64MOVWgpfp {
  3274  			break
  3275  		}
  3276  		val := v_1.Args[0]
  3277  		mem := v_2
  3278  		v.reset(OpLOONG64MOVWstore)
  3279  		v.AuxInt = int32ToAuxInt(off)
  3280  		v.Aux = symToAux(sym)
  3281  		v.AddArg3(ptr, val, mem)
  3282  		return true
  3283  	}
  3284  	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3285  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3286  	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
  3287  	for {
  3288  		off1 := auxIntToInt32(v.AuxInt)
  3289  		sym := auxToSym(v.Aux)
  3290  		if v_0.Op != OpLOONG64ADDVconst {
  3291  			break
  3292  		}
  3293  		off2 := auxIntToInt64(v_0.AuxInt)
  3294  		ptr := v_0.Args[0]
  3295  		val := v_1
  3296  		mem := v_2
  3297  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3298  			break
  3299  		}
  3300  		v.reset(OpLOONG64MOVFstore)
  3301  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3302  		v.Aux = symToAux(sym)
  3303  		v.AddArg3(ptr, val, mem)
  3304  		return true
  3305  	}
  3306  	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3307  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3308  	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3309  	for {
  3310  		off1 := auxIntToInt32(v.AuxInt)
  3311  		sym1 := auxToSym(v.Aux)
  3312  		if v_0.Op != OpLOONG64MOVVaddr {
  3313  			break
  3314  		}
  3315  		off2 := auxIntToInt32(v_0.AuxInt)
  3316  		sym2 := auxToSym(v_0.Aux)
  3317  		ptr := v_0.Args[0]
  3318  		val := v_1
  3319  		mem := v_2
  3320  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3321  			break
  3322  		}
  3323  		v.reset(OpLOONG64MOVFstore)
  3324  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3325  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3326  		v.AddArg3(ptr, val, mem)
  3327  		return true
  3328  	}
  3329  	// match: (MOVFstore [off] {sym} (ADDV ptr idx) val mem)
  3330  	// cond: off == 0 && sym == nil
  3331  	// result: (MOVFstoreidx ptr idx val mem)
  3332  	for {
  3333  		off := auxIntToInt32(v.AuxInt)
  3334  		sym := auxToSym(v.Aux)
  3335  		if v_0.Op != OpLOONG64ADDV {
  3336  			break
  3337  		}
  3338  		idx := v_0.Args[1]
  3339  		ptr := v_0.Args[0]
  3340  		val := v_1
  3341  		mem := v_2
  3342  		if !(off == 0 && sym == nil) {
  3343  			break
  3344  		}
  3345  		v.reset(OpLOONG64MOVFstoreidx)
  3346  		v.AddArg4(ptr, idx, val, mem)
  3347  		return true
  3348  	}
  3349  	return false
  3350  }
  3351  func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
  3352  	v_3 := v.Args[3]
  3353  	v_2 := v.Args[2]
  3354  	v_1 := v.Args[1]
  3355  	v_0 := v.Args[0]
  3356  	// match: (MOVFstoreidx ptr (MOVVconst [c]) val mem)
  3357  	// cond: is32Bit(c)
  3358  	// result: (MOVFstore [int32(c)] ptr val mem)
  3359  	for {
  3360  		ptr := v_0
  3361  		if v_1.Op != OpLOONG64MOVVconst {
  3362  			break
  3363  		}
  3364  		c := auxIntToInt64(v_1.AuxInt)
  3365  		val := v_2
  3366  		mem := v_3
  3367  		if !(is32Bit(c)) {
  3368  			break
  3369  		}
  3370  		v.reset(OpLOONG64MOVFstore)
  3371  		v.AuxInt = int32ToAuxInt(int32(c))
  3372  		v.AddArg3(ptr, val, mem)
  3373  		return true
  3374  	}
  3375  	// match: (MOVFstoreidx (MOVVconst [c]) idx val mem)
  3376  	// cond: is32Bit(c)
  3377  	// result: (MOVFstore [int32(c)] idx val mem)
  3378  	for {
  3379  		if v_0.Op != OpLOONG64MOVVconst {
  3380  			break
  3381  		}
  3382  		c := auxIntToInt64(v_0.AuxInt)
  3383  		idx := v_1
  3384  		val := v_2
  3385  		mem := v_3
  3386  		if !(is32Bit(c)) {
  3387  			break
  3388  		}
  3389  		v.reset(OpLOONG64MOVFstore)
  3390  		v.AuxInt = int32ToAuxInt(int32(c))
  3391  		v.AddArg3(idx, val, mem)
  3392  		return true
  3393  	}
  3394  	return false
  3395  }
  3396  func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
  3397  	v_1 := v.Args[1]
  3398  	v_0 := v.Args[0]
  3399  	b := v.Block
  3400  	config := b.Func.Config
  3401  	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3402  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3403  	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
  3404  	for {
  3405  		off1 := auxIntToInt32(v.AuxInt)
  3406  		sym := auxToSym(v.Aux)
  3407  		if v_0.Op != OpLOONG64ADDVconst {
  3408  			break
  3409  		}
  3410  		off2 := auxIntToInt64(v_0.AuxInt)
  3411  		ptr := v_0.Args[0]
  3412  		mem := v_1
  3413  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3414  			break
  3415  		}
  3416  		v.reset(OpLOONG64MOVHUload)
  3417  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3418  		v.Aux = symToAux(sym)
  3419  		v.AddArg2(ptr, mem)
  3420  		return true
  3421  	}
  3422  	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3423  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3424  	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3425  	for {
  3426  		off1 := auxIntToInt32(v.AuxInt)
  3427  		sym1 := auxToSym(v.Aux)
  3428  		if v_0.Op != OpLOONG64MOVVaddr {
  3429  			break
  3430  		}
  3431  		off2 := auxIntToInt32(v_0.AuxInt)
  3432  		sym2 := auxToSym(v_0.Aux)
  3433  		ptr := v_0.Args[0]
  3434  		mem := v_1
  3435  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3436  			break
  3437  		}
  3438  		v.reset(OpLOONG64MOVHUload)
  3439  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3440  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3441  		v.AddArg2(ptr, mem)
  3442  		return true
  3443  	}
  3444  	// match: (MOVHUload [off] {sym} (ADDV ptr idx) mem)
  3445  	// cond: off == 0 && sym == nil
  3446  	// result: (MOVHUloadidx ptr idx mem)
  3447  	for {
  3448  		off := auxIntToInt32(v.AuxInt)
  3449  		sym := auxToSym(v.Aux)
  3450  		if v_0.Op != OpLOONG64ADDV {
  3451  			break
  3452  		}
  3453  		idx := v_0.Args[1]
  3454  		ptr := v_0.Args[0]
  3455  		mem := v_1
  3456  		if !(off == 0 && sym == nil) {
  3457  			break
  3458  		}
  3459  		v.reset(OpLOONG64MOVHUloadidx)
  3460  		v.AddArg3(ptr, idx, mem)
  3461  		return true
  3462  	}
  3463  	return false
  3464  }
  3465  func rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v *Value) bool {
  3466  	v_2 := v.Args[2]
  3467  	v_1 := v.Args[1]
  3468  	v_0 := v.Args[0]
  3469  	// match: (MOVHUloadidx ptr (MOVVconst [c]) mem)
  3470  	// cond: is32Bit(c)
  3471  	// result: (MOVHUload [int32(c)] ptr mem)
  3472  	for {
  3473  		ptr := v_0
  3474  		if v_1.Op != OpLOONG64MOVVconst {
  3475  			break
  3476  		}
  3477  		c := auxIntToInt64(v_1.AuxInt)
  3478  		mem := v_2
  3479  		if !(is32Bit(c)) {
  3480  			break
  3481  		}
  3482  		v.reset(OpLOONG64MOVHUload)
  3483  		v.AuxInt = int32ToAuxInt(int32(c))
  3484  		v.AddArg2(ptr, mem)
  3485  		return true
  3486  	}
  3487  	// match: (MOVHUloadidx (MOVVconst [c]) ptr mem)
  3488  	// cond: is32Bit(c)
  3489  	// result: (MOVHUload [int32(c)] ptr mem)
  3490  	for {
  3491  		if v_0.Op != OpLOONG64MOVVconst {
  3492  			break
  3493  		}
  3494  		c := auxIntToInt64(v_0.AuxInt)
  3495  		ptr := v_1
  3496  		mem := v_2
  3497  		if !(is32Bit(c)) {
  3498  			break
  3499  		}
  3500  		v.reset(OpLOONG64MOVHUload)
  3501  		v.AuxInt = int32ToAuxInt(int32(c))
  3502  		v.AddArg2(ptr, mem)
  3503  		return true
  3504  	}
  3505  	return false
  3506  }
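// MOVHUreg rules eliminate redundant 16-bit zero-extensions: a logical right
// shift by rc < 16 is combined into a single bit-field extract (BSTRPICKV of
// bits rc+15..rc), values already produced by narrower unsigned loads or
// extensions collapse to a plain MOVVreg, a left shift by 16 or more becomes
// the constant zero, and constants are folded to uint16(c).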
  3507  func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
  3508  	v_0 := v.Args[0]
  3509  	// match: (MOVHUreg (SRLVconst [rc] x))
  3510  	// cond: rc < 16
  3511  	// result: (BSTRPICKV [rc + (15+rc)<<6] x)
  3512  	for {
  3513  		if v_0.Op != OpLOONG64SRLVconst {
  3514  			break
  3515  		}
  3516  		rc := auxIntToInt64(v_0.AuxInt)
  3517  		x := v_0.Args[0]
  3518  		if !(rc < 16) {
  3519  			break
  3520  		}
  3521  		v.reset(OpLOONG64BSTRPICKV)
  3522  		v.AuxInt = int64ToAuxInt(rc + (15+rc)<<6)
  3523  		v.AddArg(x)
  3524  		return true
  3525  	}
  3526  	// match: (MOVHUreg x:(MOVBUload _ _))
  3527  	// result: (MOVVreg x)
  3528  	for {
  3529  		x := v_0
  3530  		if x.Op != OpLOONG64MOVBUload {
  3531  			break
  3532  		}
  3533  		v.reset(OpLOONG64MOVVreg)
  3534  		v.AddArg(x)
  3535  		return true
  3536  	}
  3537  	// match: (MOVHUreg x:(MOVHUload _ _))
  3538  	// result: (MOVVreg x)
  3539  	for {
  3540  		x := v_0
  3541  		if x.Op != OpLOONG64MOVHUload {
  3542  			break
  3543  		}
  3544  		v.reset(OpLOONG64MOVVreg)
  3545  		v.AddArg(x)
  3546  		return true
  3547  	}
  3548  	// match: (MOVHUreg x:(MOVBUreg _))
  3549  	// result: (MOVVreg x)
  3550  	for {
  3551  		x := v_0
  3552  		if x.Op != OpLOONG64MOVBUreg {
  3553  			break
  3554  		}
  3555  		v.reset(OpLOONG64MOVVreg)
  3556  		v.AddArg(x)
  3557  		return true
  3558  	}
  3559  	// match: (MOVHUreg x:(MOVHUreg _))
  3560  	// result: (MOVVreg x)
  3561  	for {
  3562  		x := v_0
  3563  		if x.Op != OpLOONG64MOVHUreg {
  3564  			break
  3565  		}
  3566  		v.reset(OpLOONG64MOVVreg)
  3567  		v.AddArg(x)
  3568  		return true
  3569  	}
  3570  	// match: (MOVHUreg (SLLVconst [lc] x))
  3571  	// cond: lc >= 16
  3572  	// result: (MOVVconst [0])
  3573  	for {
  3574  		if v_0.Op != OpLOONG64SLLVconst {
  3575  			break
  3576  		}
  3577  		lc := auxIntToInt64(v_0.AuxInt)
  3578  		if !(lc >= 16) {
  3579  			break
  3580  		}
  3581  		v.reset(OpLOONG64MOVVconst)
  3582  		v.AuxInt = int64ToAuxInt(0)
  3583  		return true
  3584  	}
  3585  	// match: (MOVHUreg (MOVVconst [c]))
  3586  	// result: (MOVVconst [int64(uint16(c))])
  3587  	for {
  3588  		if v_0.Op != OpLOONG64MOVVconst {
  3589  			break
  3590  		}
  3591  		c := auxIntToInt64(v_0.AuxInt)
  3592  		v.reset(OpLOONG64MOVVconst)
  3593  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  3594  		return true
  3595  	}
  3596  	return false
  3597  }
  3598  func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
  3599  	v_1 := v.Args[1]
  3600  	v_0 := v.Args[0]
  3601  	b := v.Block
  3602  	config := b.Func.Config
  3603  	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
  3604  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3605  	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
  3606  	for {
  3607  		off1 := auxIntToInt32(v.AuxInt)
  3608  		sym := auxToSym(v.Aux)
  3609  		if v_0.Op != OpLOONG64ADDVconst {
  3610  			break
  3611  		}
  3612  		off2 := auxIntToInt64(v_0.AuxInt)
  3613  		ptr := v_0.Args[0]
  3614  		mem := v_1
  3615  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3616  			break
  3617  		}
  3618  		v.reset(OpLOONG64MOVHload)
  3619  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3620  		v.Aux = symToAux(sym)
  3621  		v.AddArg2(ptr, mem)
  3622  		return true
  3623  	}
  3624  	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  3625  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3626  	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  3627  	for {
  3628  		off1 := auxIntToInt32(v.AuxInt)
  3629  		sym1 := auxToSym(v.Aux)
  3630  		if v_0.Op != OpLOONG64MOVVaddr {
  3631  			break
  3632  		}
  3633  		off2 := auxIntToInt32(v_0.AuxInt)
  3634  		sym2 := auxToSym(v_0.Aux)
  3635  		ptr := v_0.Args[0]
  3636  		mem := v_1
  3637  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3638  			break
  3639  		}
  3640  		v.reset(OpLOONG64MOVHload)
  3641  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3642  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3643  		v.AddArg2(ptr, mem)
  3644  		return true
  3645  	}
  3646  	// match: (MOVHload [off] {sym} (ADDV ptr idx) mem)
  3647  	// cond: off == 0 && sym == nil
  3648  	// result: (MOVHloadidx ptr idx mem)
  3649  	for {
  3650  		off := auxIntToInt32(v.AuxInt)
  3651  		sym := auxToSym(v.Aux)
  3652  		if v_0.Op != OpLOONG64ADDV {
  3653  			break
  3654  		}
  3655  		idx := v_0.Args[1]
  3656  		ptr := v_0.Args[0]
  3657  		mem := v_1
  3658  		if !(off == 0 && sym == nil) {
  3659  			break
  3660  		}
  3661  		v.reset(OpLOONG64MOVHloadidx)
  3662  		v.AddArg3(ptr, idx, mem)
  3663  		return true
  3664  	}
  3665  	return false
  3666  }
  3667  func rewriteValueLOONG64_OpLOONG64MOVHloadidx(v *Value) bool {
  3668  	v_2 := v.Args[2]
  3669  	v_1 := v.Args[1]
  3670  	v_0 := v.Args[0]
  3671  	// match: (MOVHloadidx ptr (MOVVconst [c]) mem)
  3672  	// cond: is32Bit(c)
  3673  	// result: (MOVHload [int32(c)] ptr mem)
  3674  	for {
  3675  		ptr := v_0
  3676  		if v_1.Op != OpLOONG64MOVVconst {
  3677  			break
  3678  		}
  3679  		c := auxIntToInt64(v_1.AuxInt)
  3680  		mem := v_2
  3681  		if !(is32Bit(c)) {
  3682  			break
  3683  		}
  3684  		v.reset(OpLOONG64MOVHload)
  3685  		v.AuxInt = int32ToAuxInt(int32(c))
  3686  		v.AddArg2(ptr, mem)
  3687  		return true
  3688  	}
  3689  	// match: (MOVHloadidx (MOVVconst [c]) ptr mem)
  3690  	// cond: is32Bit(c)
  3691  	// result: (MOVHload [int32(c)] ptr mem)
  3692  	for {
  3693  		if v_0.Op != OpLOONG64MOVVconst {
  3694  			break
  3695  		}
  3696  		c := auxIntToInt64(v_0.AuxInt)
  3697  		ptr := v_1
  3698  		mem := v_2
  3699  		if !(is32Bit(c)) {
  3700  			break
  3701  		}
  3702  		v.reset(OpLOONG64MOVHload)
  3703  		v.AuxInt = int32ToAuxInt(int32(c))
  3704  		v.AddArg2(ptr, mem)
  3705  		return true
  3706  	}
  3707  	return false
  3708  }
  3709  func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
  3710  	v_0 := v.Args[0]
  3711  	// match: (MOVHreg x:(MOVBload _ _))
  3712  	// result: (MOVVreg x)
  3713  	for {
  3714  		x := v_0
  3715  		if x.Op != OpLOONG64MOVBload {
  3716  			break
  3717  		}
  3718  		v.reset(OpLOONG64MOVVreg)
  3719  		v.AddArg(x)
  3720  		return true
  3721  	}
  3722  	// match: (MOVHreg x:(MOVBUload _ _))
  3723  	// result: (MOVVreg x)
  3724  	for {
  3725  		x := v_0
  3726  		if x.Op != OpLOONG64MOVBUload {
  3727  			break
  3728  		}
  3729  		v.reset(OpLOONG64MOVVreg)
  3730  		v.AddArg(x)
  3731  		return true
  3732  	}
  3733  	// match: (MOVHreg x:(MOVHload _ _))
  3734  	// result: (MOVVreg x)
  3735  	for {
  3736  		x := v_0
  3737  		if x.Op != OpLOONG64MOVHload {
  3738  			break
  3739  		}
  3740  		v.reset(OpLOONG64MOVVreg)
  3741  		v.AddArg(x)
  3742  		return true
  3743  	}
  3744  	// match: (MOVHreg x:(MOVBreg _))
  3745  	// result: (MOVVreg x)
  3746  	for {
  3747  		x := v_0
  3748  		if x.Op != OpLOONG64MOVBreg {
  3749  			break
  3750  		}
  3751  		v.reset(OpLOONG64MOVVreg)
  3752  		v.AddArg(x)
  3753  		return true
  3754  	}
  3755  	// match: (MOVHreg x:(MOVBUreg _))
  3756  	// result: (MOVVreg x)
  3757  	for {
  3758  		x := v_0
  3759  		if x.Op != OpLOONG64MOVBUreg {
  3760  			break
  3761  		}
  3762  		v.reset(OpLOONG64MOVVreg)
  3763  		v.AddArg(x)
  3764  		return true
  3765  	}
  3766  	// match: (MOVHreg x:(MOVHreg _))
  3767  	// result: (MOVVreg x)
  3768  	for {
  3769  		x := v_0
  3770  		if x.Op != OpLOONG64MOVHreg {
  3771  			break
  3772  		}
  3773  		v.reset(OpLOONG64MOVVreg)
  3774  		v.AddArg(x)
  3775  		return true
  3776  	}
  3777  	// match: (MOVHreg (MOVVconst [c]))
  3778  	// result: (MOVVconst [int64(int16(c))])
  3779  	for {
  3780  		if v_0.Op != OpLOONG64MOVVconst {
  3781  			break
  3782  		}
  3783  		c := auxIntToInt64(v_0.AuxInt)
  3784  		v.reset(OpLOONG64MOVVconst)
  3785  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  3786  		return true
  3787  	}
  3788  	return false
  3789  }
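// MOVHstore folds addressing and value computations into the store: constant
// offsets from ADDVconst and MOVVaddr are merged into the AuxInt/Aux fields,
// sign- and zero-extensions of the stored value are dropped (only the low 16
// bits are written anyway), storing a zero constant becomes MOVHstorezero, and
// an ADDV address turns the store into the register-indexed MOVHstoreidx form.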
  3790  func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
  3791  	v_2 := v.Args[2]
  3792  	v_1 := v.Args[1]
  3793  	v_0 := v.Args[0]
  3794  	b := v.Block
  3795  	config := b.Func.Config
  3796  	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  3797  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3798  	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
  3799  	for {
  3800  		off1 := auxIntToInt32(v.AuxInt)
  3801  		sym := auxToSym(v.Aux)
  3802  		if v_0.Op != OpLOONG64ADDVconst {
  3803  			break
  3804  		}
  3805  		off2 := auxIntToInt64(v_0.AuxInt)
  3806  		ptr := v_0.Args[0]
  3807  		val := v_1
  3808  		mem := v_2
  3809  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3810  			break
  3811  		}
  3812  		v.reset(OpLOONG64MOVHstore)
  3813  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3814  		v.Aux = symToAux(sym)
  3815  		v.AddArg3(ptr, val, mem)
  3816  		return true
  3817  	}
  3818  	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  3819  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  3820  	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  3821  	for {
  3822  		off1 := auxIntToInt32(v.AuxInt)
  3823  		sym1 := auxToSym(v.Aux)
  3824  		if v_0.Op != OpLOONG64MOVVaddr {
  3825  			break
  3826  		}
  3827  		off2 := auxIntToInt32(v_0.AuxInt)
  3828  		sym2 := auxToSym(v_0.Aux)
  3829  		ptr := v_0.Args[0]
  3830  		val := v_1
  3831  		mem := v_2
  3832  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  3833  			break
  3834  		}
  3835  		v.reset(OpLOONG64MOVHstore)
  3836  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  3837  		v.Aux = symToAux(mergeSym(sym1, sym2))
  3838  		v.AddArg3(ptr, val, mem)
  3839  		return true
  3840  	}
  3841  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  3842  	// result: (MOVHstore [off] {sym} ptr x mem)
  3843  	for {
  3844  		off := auxIntToInt32(v.AuxInt)
  3845  		sym := auxToSym(v.Aux)
  3846  		ptr := v_0
  3847  		if v_1.Op != OpLOONG64MOVHreg {
  3848  			break
  3849  		}
  3850  		x := v_1.Args[0]
  3851  		mem := v_2
  3852  		v.reset(OpLOONG64MOVHstore)
  3853  		v.AuxInt = int32ToAuxInt(off)
  3854  		v.Aux = symToAux(sym)
  3855  		v.AddArg3(ptr, x, mem)
  3856  		return true
  3857  	}
  3858  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  3859  	// result: (MOVHstore [off] {sym} ptr x mem)
  3860  	for {
  3861  		off := auxIntToInt32(v.AuxInt)
  3862  		sym := auxToSym(v.Aux)
  3863  		ptr := v_0
  3864  		if v_1.Op != OpLOONG64MOVHUreg {
  3865  			break
  3866  		}
  3867  		x := v_1.Args[0]
  3868  		mem := v_2
  3869  		v.reset(OpLOONG64MOVHstore)
  3870  		v.AuxInt = int32ToAuxInt(off)
  3871  		v.Aux = symToAux(sym)
  3872  		v.AddArg3(ptr, x, mem)
  3873  		return true
  3874  	}
  3875  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  3876  	// result: (MOVHstore [off] {sym} ptr x mem)
  3877  	for {
  3878  		off := auxIntToInt32(v.AuxInt)
  3879  		sym := auxToSym(v.Aux)
  3880  		ptr := v_0
  3881  		if v_1.Op != OpLOONG64MOVWreg {
  3882  			break
  3883  		}
  3884  		x := v_1.Args[0]
  3885  		mem := v_2
  3886  		v.reset(OpLOONG64MOVHstore)
  3887  		v.AuxInt = int32ToAuxInt(off)
  3888  		v.Aux = symToAux(sym)
  3889  		v.AddArg3(ptr, x, mem)
  3890  		return true
  3891  	}
  3892  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  3893  	// result: (MOVHstore [off] {sym} ptr x mem)
  3894  	for {
  3895  		off := auxIntToInt32(v.AuxInt)
  3896  		sym := auxToSym(v.Aux)
  3897  		ptr := v_0
  3898  		if v_1.Op != OpLOONG64MOVWUreg {
  3899  			break
  3900  		}
  3901  		x := v_1.Args[0]
  3902  		mem := v_2
  3903  		v.reset(OpLOONG64MOVHstore)
  3904  		v.AuxInt = int32ToAuxInt(off)
  3905  		v.Aux = symToAux(sym)
  3906  		v.AddArg3(ptr, x, mem)
  3907  		return true
  3908  	}
  3909  	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
  3910  	// result: (MOVHstorezero [off] {sym} ptr mem)
  3911  	for {
  3912  		off := auxIntToInt32(v.AuxInt)
  3913  		sym := auxToSym(v.Aux)
  3914  		ptr := v_0
  3915  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  3916  			break
  3917  		}
  3918  		mem := v_2
  3919  		v.reset(OpLOONG64MOVHstorezero)
  3920  		v.AuxInt = int32ToAuxInt(off)
  3921  		v.Aux = symToAux(sym)
  3922  		v.AddArg2(ptr, mem)
  3923  		return true
  3924  	}
  3925  	// match: (MOVHstore [off] {sym} (ADDV ptr idx) val mem)
  3926  	// cond: off == 0 && sym == nil
  3927  	// result: (MOVHstoreidx ptr idx val mem)
  3928  	for {
  3929  		off := auxIntToInt32(v.AuxInt)
  3930  		sym := auxToSym(v.Aux)
  3931  		if v_0.Op != OpLOONG64ADDV {
  3932  			break
  3933  		}
  3934  		idx := v_0.Args[1]
  3935  		ptr := v_0.Args[0]
  3936  		val := v_1
  3937  		mem := v_2
  3938  		if !(off == 0 && sym == nil) {
  3939  			break
  3940  		}
  3941  		v.reset(OpLOONG64MOVHstoreidx)
  3942  		v.AddArg4(ptr, idx, val, mem)
  3943  		return true
  3944  	}
  3945  	return false
  3946  }
  3947  func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
  3948  	v_3 := v.Args[3]
  3949  	v_2 := v.Args[2]
  3950  	v_1 := v.Args[1]
  3951  	v_0 := v.Args[0]
  3952  	// match: (MOVHstoreidx ptr (MOVVconst [c]) val mem)
  3953  	// cond: is32Bit(c)
  3954  	// result: (MOVHstore [int32(c)] ptr val mem)
  3955  	for {
  3956  		ptr := v_0
  3957  		if v_1.Op != OpLOONG64MOVVconst {
  3958  			break
  3959  		}
  3960  		c := auxIntToInt64(v_1.AuxInt)
  3961  		val := v_2
  3962  		mem := v_3
  3963  		if !(is32Bit(c)) {
  3964  			break
  3965  		}
  3966  		v.reset(OpLOONG64MOVHstore)
  3967  		v.AuxInt = int32ToAuxInt(int32(c))
  3968  		v.AddArg3(ptr, val, mem)
  3969  		return true
  3970  	}
  3971  	// match: (MOVHstoreidx (MOVVconst [c]) idx val mem)
  3972  	// cond: is32Bit(c)
  3973  	// result: (MOVHstore [int32(c)] idx val mem)
  3974  	for {
  3975  		if v_0.Op != OpLOONG64MOVVconst {
  3976  			break
  3977  		}
  3978  		c := auxIntToInt64(v_0.AuxInt)
  3979  		idx := v_1
  3980  		val := v_2
  3981  		mem := v_3
  3982  		if !(is32Bit(c)) {
  3983  			break
  3984  		}
  3985  		v.reset(OpLOONG64MOVHstore)
  3986  		v.AuxInt = int32ToAuxInt(int32(c))
  3987  		v.AddArg3(idx, val, mem)
  3988  		return true
  3989  	}
  3990  	// match: (MOVHstoreidx ptr idx (MOVVconst [0]) mem)
  3991  	// result: (MOVHstorezeroidx ptr idx mem)
  3992  	for {
  3993  		ptr := v_0
  3994  		idx := v_1
  3995  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  3996  			break
  3997  		}
  3998  		mem := v_3
  3999  		v.reset(OpLOONG64MOVHstorezeroidx)
  4000  		v.AddArg3(ptr, idx, mem)
  4001  		return true
  4002  	}
  4003  	return false
  4004  }
  4005  func rewriteValueLOONG64_OpLOONG64MOVHstorezero(v *Value) bool {
  4006  	v_1 := v.Args[1]
  4007  	v_0 := v.Args[0]
  4008  	b := v.Block
  4009  	config := b.Func.Config
  4010  	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4011  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4012  	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
  4013  	for {
  4014  		off1 := auxIntToInt32(v.AuxInt)
  4015  		sym := auxToSym(v.Aux)
  4016  		if v_0.Op != OpLOONG64ADDVconst {
  4017  			break
  4018  		}
  4019  		off2 := auxIntToInt64(v_0.AuxInt)
  4020  		ptr := v_0.Args[0]
  4021  		mem := v_1
  4022  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4023  			break
  4024  		}
  4025  		v.reset(OpLOONG64MOVHstorezero)
  4026  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4027  		v.Aux = symToAux(sym)
  4028  		v.AddArg2(ptr, mem)
  4029  		return true
  4030  	}
  4031  	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4032  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4033  	// result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4034  	for {
  4035  		off1 := auxIntToInt32(v.AuxInt)
  4036  		sym1 := auxToSym(v.Aux)
  4037  		if v_0.Op != OpLOONG64MOVVaddr {
  4038  			break
  4039  		}
  4040  		off2 := auxIntToInt32(v_0.AuxInt)
  4041  		sym2 := auxToSym(v_0.Aux)
  4042  		ptr := v_0.Args[0]
  4043  		mem := v_1
  4044  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4045  			break
  4046  		}
  4047  		v.reset(OpLOONG64MOVHstorezero)
  4048  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4049  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4050  		v.AddArg2(ptr, mem)
  4051  		return true
  4052  	}
  4053  	// match: (MOVHstorezero [off] {sym} (ADDV ptr idx) mem)
  4054  	// cond: off == 0 && sym == nil
  4055  	// result: (MOVHstorezeroidx ptr idx mem)
  4056  	for {
  4057  		off := auxIntToInt32(v.AuxInt)
  4058  		sym := auxToSym(v.Aux)
  4059  		if v_0.Op != OpLOONG64ADDV {
  4060  			break
  4061  		}
  4062  		idx := v_0.Args[1]
  4063  		ptr := v_0.Args[0]
  4064  		mem := v_1
  4065  		if !(off == 0 && sym == nil) {
  4066  			break
  4067  		}
  4068  		v.reset(OpLOONG64MOVHstorezeroidx)
  4069  		v.AddArg3(ptr, idx, mem)
  4070  		return true
  4071  	}
  4072  	return false
  4073  }
  4074  func rewriteValueLOONG64_OpLOONG64MOVHstorezeroidx(v *Value) bool {
  4075  	v_2 := v.Args[2]
  4076  	v_1 := v.Args[1]
  4077  	v_0 := v.Args[0]
  4078  	// match: (MOVHstorezeroidx ptr (MOVVconst [c]) mem)
  4079  	// cond: is32Bit(c)
  4080  	// result: (MOVHstorezero [int32(c)] ptr mem)
  4081  	for {
  4082  		ptr := v_0
  4083  		if v_1.Op != OpLOONG64MOVVconst {
  4084  			break
  4085  		}
  4086  		c := auxIntToInt64(v_1.AuxInt)
  4087  		mem := v_2
  4088  		if !(is32Bit(c)) {
  4089  			break
  4090  		}
  4091  		v.reset(OpLOONG64MOVHstorezero)
  4092  		v.AuxInt = int32ToAuxInt(int32(c))
  4093  		v.AddArg2(ptr, mem)
  4094  		return true
  4095  	}
  4096  	// match: (MOVHstorezeroidx (MOVVconst [c]) idx mem)
  4097  	// cond: is32Bit(c)
  4098  	// result: (MOVHstorezero [int32(c)] idx mem)
  4099  	for {
  4100  		if v_0.Op != OpLOONG64MOVVconst {
  4101  			break
  4102  		}
  4103  		c := auxIntToInt64(v_0.AuxInt)
  4104  		idx := v_1
  4105  		mem := v_2
  4106  		if !(is32Bit(c)) {
  4107  			break
  4108  		}
  4109  		v.reset(OpLOONG64MOVHstorezero)
  4110  		v.AuxInt = int32ToAuxInt(int32(c))
  4111  		v.AddArg2(idx, mem)
  4112  		return true
  4113  	}
  4114  	return false
  4115  }
  4116  func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
  4117  	v_1 := v.Args[1]
  4118  	v_0 := v.Args[0]
  4119  	b := v.Block
  4120  	config := b.Func.Config
  4121  	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
  4122  	// result: (MOVVfpgp val)
  4123  	for {
  4124  		off := auxIntToInt32(v.AuxInt)
  4125  		sym := auxToSym(v.Aux)
  4126  		ptr := v_0
  4127  		if v_1.Op != OpLOONG64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  4128  			break
  4129  		}
  4130  		val := v_1.Args[1]
  4131  		if ptr != v_1.Args[0] {
  4132  			break
  4133  		}
  4134  		v.reset(OpLOONG64MOVVfpgp)
  4135  		v.AddArg(val)
  4136  		return true
  4137  	}
  4138  	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4139  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4140  	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
  4141  	for {
  4142  		off1 := auxIntToInt32(v.AuxInt)
  4143  		sym := auxToSym(v.Aux)
  4144  		if v_0.Op != OpLOONG64ADDVconst {
  4145  			break
  4146  		}
  4147  		off2 := auxIntToInt64(v_0.AuxInt)
  4148  		ptr := v_0.Args[0]
  4149  		mem := v_1
  4150  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4151  			break
  4152  		}
  4153  		v.reset(OpLOONG64MOVVload)
  4154  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4155  		v.Aux = symToAux(sym)
  4156  		v.AddArg2(ptr, mem)
  4157  		return true
  4158  	}
  4159  	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4160  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4161  	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4162  	for {
  4163  		off1 := auxIntToInt32(v.AuxInt)
  4164  		sym1 := auxToSym(v.Aux)
  4165  		if v_0.Op != OpLOONG64MOVVaddr {
  4166  			break
  4167  		}
  4168  		off2 := auxIntToInt32(v_0.AuxInt)
  4169  		sym2 := auxToSym(v_0.Aux)
  4170  		ptr := v_0.Args[0]
  4171  		mem := v_1
  4172  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4173  			break
  4174  		}
  4175  		v.reset(OpLOONG64MOVVload)
  4176  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4177  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4178  		v.AddArg2(ptr, mem)
  4179  		return true
  4180  	}
  4181  	// match: (MOVVload [off] {sym} (ADDV ptr idx) mem)
  4182  	// cond: off == 0 && sym == nil
  4183  	// result: (MOVVloadidx ptr idx mem)
  4184  	for {
  4185  		off := auxIntToInt32(v.AuxInt)
  4186  		sym := auxToSym(v.Aux)
  4187  		if v_0.Op != OpLOONG64ADDV {
  4188  			break
  4189  		}
  4190  		idx := v_0.Args[1]
  4191  		ptr := v_0.Args[0]
  4192  		mem := v_1
  4193  		if !(off == 0 && sym == nil) {
  4194  			break
  4195  		}
  4196  		v.reset(OpLOONG64MOVVloadidx)
  4197  		v.AddArg3(ptr, idx, mem)
  4198  		return true
  4199  	}
  4200  	return false
  4201  }
  4202  func rewriteValueLOONG64_OpLOONG64MOVVloadidx(v *Value) bool {
  4203  	v_2 := v.Args[2]
  4204  	v_1 := v.Args[1]
  4205  	v_0 := v.Args[0]
  4206  	// match: (MOVVloadidx ptr (MOVVconst [c]) mem)
  4207  	// cond: is32Bit(c)
  4208  	// result: (MOVVload [int32(c)] ptr mem)
  4209  	for {
  4210  		ptr := v_0
  4211  		if v_1.Op != OpLOONG64MOVVconst {
  4212  			break
  4213  		}
  4214  		c := auxIntToInt64(v_1.AuxInt)
  4215  		mem := v_2
  4216  		if !(is32Bit(c)) {
  4217  			break
  4218  		}
  4219  		v.reset(OpLOONG64MOVVload)
  4220  		v.AuxInt = int32ToAuxInt(int32(c))
  4221  		v.AddArg2(ptr, mem)
  4222  		return true
  4223  	}
  4224  	// match: (MOVVloadidx (MOVVconst [c]) ptr mem)
  4225  	// cond: is32Bit(c)
  4226  	// result: (MOVVload [int32(c)] ptr mem)
  4227  	for {
  4228  		if v_0.Op != OpLOONG64MOVVconst {
  4229  			break
  4230  		}
  4231  		c := auxIntToInt64(v_0.AuxInt)
  4232  		ptr := v_1
  4233  		mem := v_2
  4234  		if !(is32Bit(c)) {
  4235  			break
  4236  		}
  4237  		v.reset(OpLOONG64MOVVload)
  4238  		v.AuxInt = int32ToAuxInt(int32(c))
  4239  		v.AddArg2(ptr, mem)
  4240  		return true
  4241  	}
  4242  	return false
  4243  }
  4244  func rewriteValueLOONG64_OpLOONG64MOVVnop(v *Value) bool {
  4245  	v_0 := v.Args[0]
  4246  	// match: (MOVVnop (MOVVconst [c]))
  4247  	// result: (MOVVconst [c])
  4248  	for {
  4249  		if v_0.Op != OpLOONG64MOVVconst {
  4250  			break
  4251  		}
  4252  		c := auxIntToInt64(v_0.AuxInt)
  4253  		v.reset(OpLOONG64MOVVconst)
  4254  		v.AuxInt = int64ToAuxInt(c)
  4255  		return true
  4256  	}
  4257  	return false
  4258  }
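// MOVVreg copies a 64-bit value between registers. When its argument has only
// a single use it is downgraded to MOVVnop, which emits no machine instruction
// and only ensures the result type; a move of a constant folds to the constant
// itself.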
  4259  func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
  4260  	v_0 := v.Args[0]
  4261  	// match: (MOVVreg x)
  4262  	// cond: x.Uses == 1
  4263  	// result: (MOVVnop x)
  4264  	for {
  4265  		x := v_0
  4266  		if !(x.Uses == 1) {
  4267  			break
  4268  		}
  4269  		v.reset(OpLOONG64MOVVnop)
  4270  		v.AddArg(x)
  4271  		return true
  4272  	}
  4273  	// match: (MOVVreg (MOVVconst [c]))
  4274  	// result: (MOVVconst [c])
  4275  	for {
  4276  		if v_0.Op != OpLOONG64MOVVconst {
  4277  			break
  4278  		}
  4279  		c := auxIntToInt64(v_0.AuxInt)
  4280  		v.reset(OpLOONG64MOVVconst)
  4281  		v.AuxInt = int64ToAuxInt(c)
  4282  		return true
  4283  	}
  4284  	return false
  4285  }
  4286  func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
  4287  	v_2 := v.Args[2]
  4288  	v_1 := v.Args[1]
  4289  	v_0 := v.Args[0]
  4290  	b := v.Block
  4291  	config := b.Func.Config
  4292  	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
  4293  	// result: (MOVDstore [off] {sym} ptr val mem)
  4294  	for {
  4295  		off := auxIntToInt32(v.AuxInt)
  4296  		sym := auxToSym(v.Aux)
  4297  		ptr := v_0
  4298  		if v_1.Op != OpLOONG64MOVVfpgp {
  4299  			break
  4300  		}
  4301  		val := v_1.Args[0]
  4302  		mem := v_2
  4303  		v.reset(OpLOONG64MOVDstore)
  4304  		v.AuxInt = int32ToAuxInt(off)
  4305  		v.Aux = symToAux(sym)
  4306  		v.AddArg3(ptr, val, mem)
  4307  		return true
  4308  	}
  4309  	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  4310  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4311  	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
  4312  	for {
  4313  		off1 := auxIntToInt32(v.AuxInt)
  4314  		sym := auxToSym(v.Aux)
  4315  		if v_0.Op != OpLOONG64ADDVconst {
  4316  			break
  4317  		}
  4318  		off2 := auxIntToInt64(v_0.AuxInt)
  4319  		ptr := v_0.Args[0]
  4320  		val := v_1
  4321  		mem := v_2
  4322  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4323  			break
  4324  		}
  4325  		v.reset(OpLOONG64MOVVstore)
  4326  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4327  		v.Aux = symToAux(sym)
  4328  		v.AddArg3(ptr, val, mem)
  4329  		return true
  4330  	}
  4331  	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  4332  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4333  	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  4334  	for {
  4335  		off1 := auxIntToInt32(v.AuxInt)
  4336  		sym1 := auxToSym(v.Aux)
  4337  		if v_0.Op != OpLOONG64MOVVaddr {
  4338  			break
  4339  		}
  4340  		off2 := auxIntToInt32(v_0.AuxInt)
  4341  		sym2 := auxToSym(v_0.Aux)
  4342  		ptr := v_0.Args[0]
  4343  		val := v_1
  4344  		mem := v_2
  4345  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4346  			break
  4347  		}
  4348  		v.reset(OpLOONG64MOVVstore)
  4349  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4350  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4351  		v.AddArg3(ptr, val, mem)
  4352  		return true
  4353  	}
  4354  	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
  4355  	// result: (MOVVstorezero [off] {sym} ptr mem)
  4356  	for {
  4357  		off := auxIntToInt32(v.AuxInt)
  4358  		sym := auxToSym(v.Aux)
  4359  		ptr := v_0
  4360  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4361  			break
  4362  		}
  4363  		mem := v_2
  4364  		v.reset(OpLOONG64MOVVstorezero)
  4365  		v.AuxInt = int32ToAuxInt(off)
  4366  		v.Aux = symToAux(sym)
  4367  		v.AddArg2(ptr, mem)
  4368  		return true
  4369  	}
  4370  	// match: (MOVVstore [off] {sym} (ADDV ptr idx) val mem)
  4371  	// cond: off == 0 && sym == nil
  4372  	// result: (MOVVstoreidx ptr idx val mem)
  4373  	for {
  4374  		off := auxIntToInt32(v.AuxInt)
  4375  		sym := auxToSym(v.Aux)
  4376  		if v_0.Op != OpLOONG64ADDV {
  4377  			break
  4378  		}
  4379  		idx := v_0.Args[1]
  4380  		ptr := v_0.Args[0]
  4381  		val := v_1
  4382  		mem := v_2
  4383  		if !(off == 0 && sym == nil) {
  4384  			break
  4385  		}
  4386  		v.reset(OpLOONG64MOVVstoreidx)
  4387  		v.AddArg4(ptr, idx, val, mem)
  4388  		return true
  4389  	}
  4390  	return false
  4391  }
  4392  func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
  4393  	v_3 := v.Args[3]
  4394  	v_2 := v.Args[2]
  4395  	v_1 := v.Args[1]
  4396  	v_0 := v.Args[0]
  4397  	// match: (MOVVstoreidx ptr (MOVVconst [c]) val mem)
  4398  	// cond: is32Bit(c)
  4399  	// result: (MOVVstore [int32(c)] ptr val mem)
  4400  	for {
  4401  		ptr := v_0
  4402  		if v_1.Op != OpLOONG64MOVVconst {
  4403  			break
  4404  		}
  4405  		c := auxIntToInt64(v_1.AuxInt)
  4406  		val := v_2
  4407  		mem := v_3
  4408  		if !(is32Bit(c)) {
  4409  			break
  4410  		}
  4411  		v.reset(OpLOONG64MOVVstore)
  4412  		v.AuxInt = int32ToAuxInt(int32(c))
  4413  		v.AddArg3(ptr, val, mem)
  4414  		return true
  4415  	}
  4416  	// match: (MOVVstoreidx (MOVVconst [c]) idx val mem)
  4417  	// cond: is32Bit(c)
  4418  	// result: (MOVVstore [int32(c)] idx val mem)
  4419  	for {
  4420  		if v_0.Op != OpLOONG64MOVVconst {
  4421  			break
  4422  		}
  4423  		c := auxIntToInt64(v_0.AuxInt)
  4424  		idx := v_1
  4425  		val := v_2
  4426  		mem := v_3
  4427  		if !(is32Bit(c)) {
  4428  			break
  4429  		}
  4430  		v.reset(OpLOONG64MOVVstore)
  4431  		v.AuxInt = int32ToAuxInt(int32(c))
  4432  		v.AddArg3(idx, val, mem)
  4433  		return true
  4434  	}
  4435  	// match: (MOVVstoreidx ptr idx (MOVVconst [0]) mem)
  4436  	// result: (MOVVstorezeroidx ptr idx mem)
  4437  	for {
  4438  		ptr := v_0
  4439  		idx := v_1
  4440  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  4441  			break
  4442  		}
  4443  		mem := v_3
  4444  		v.reset(OpLOONG64MOVVstorezeroidx)
  4445  		v.AddArg3(ptr, idx, mem)
  4446  		return true
  4447  	}
  4448  	return false
  4449  }
  4450  func rewriteValueLOONG64_OpLOONG64MOVVstorezero(v *Value) bool {
  4451  	v_1 := v.Args[1]
  4452  	v_0 := v.Args[0]
  4453  	b := v.Block
  4454  	config := b.Func.Config
  4455  	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  4456  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4457  	// result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem)
  4458  	for {
  4459  		off1 := auxIntToInt32(v.AuxInt)
  4460  		sym := auxToSym(v.Aux)
  4461  		if v_0.Op != OpLOONG64ADDVconst {
  4462  			break
  4463  		}
  4464  		off2 := auxIntToInt64(v_0.AuxInt)
  4465  		ptr := v_0.Args[0]
  4466  		mem := v_1
  4467  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4468  			break
  4469  		}
  4470  		v.reset(OpLOONG64MOVVstorezero)
  4471  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4472  		v.Aux = symToAux(sym)
  4473  		v.AddArg2(ptr, mem)
  4474  		return true
  4475  	}
  4476  	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4477  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4478  	// result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4479  	for {
  4480  		off1 := auxIntToInt32(v.AuxInt)
  4481  		sym1 := auxToSym(v.Aux)
  4482  		if v_0.Op != OpLOONG64MOVVaddr {
  4483  			break
  4484  		}
  4485  		off2 := auxIntToInt32(v_0.AuxInt)
  4486  		sym2 := auxToSym(v_0.Aux)
  4487  		ptr := v_0.Args[0]
  4488  		mem := v_1
  4489  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4490  			break
  4491  		}
  4492  		v.reset(OpLOONG64MOVVstorezero)
  4493  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4494  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4495  		v.AddArg2(ptr, mem)
  4496  		return true
  4497  	}
  4498  	// match: (MOVVstorezero [off] {sym} (ADDV ptr idx) mem)
  4499  	// cond: off == 0 && sym == nil
  4500  	// result: (MOVVstorezeroidx ptr idx mem)
  4501  	for {
  4502  		off := auxIntToInt32(v.AuxInt)
  4503  		sym := auxToSym(v.Aux)
  4504  		if v_0.Op != OpLOONG64ADDV {
  4505  			break
  4506  		}
  4507  		idx := v_0.Args[1]
  4508  		ptr := v_0.Args[0]
  4509  		mem := v_1
  4510  		if !(off == 0 && sym == nil) {
  4511  			break
  4512  		}
  4513  		v.reset(OpLOONG64MOVVstorezeroidx)
  4514  		v.AddArg3(ptr, idx, mem)
  4515  		return true
  4516  	}
  4517  	return false
  4518  }
  4519  func rewriteValueLOONG64_OpLOONG64MOVVstorezeroidx(v *Value) bool {
  4520  	v_2 := v.Args[2]
  4521  	v_1 := v.Args[1]
  4522  	v_0 := v.Args[0]
  4523  	// match: (MOVVstorezeroidx ptr (MOVVconst [c]) mem)
  4524  	// cond: is32Bit(c)
  4525  	// result: (MOVVstorezero [int32(c)] ptr mem)
  4526  	for {
  4527  		ptr := v_0
  4528  		if v_1.Op != OpLOONG64MOVVconst {
  4529  			break
  4530  		}
  4531  		c := auxIntToInt64(v_1.AuxInt)
  4532  		mem := v_2
  4533  		if !(is32Bit(c)) {
  4534  			break
  4535  		}
  4536  		v.reset(OpLOONG64MOVVstorezero)
  4537  		v.AuxInt = int32ToAuxInt(int32(c))
  4538  		v.AddArg2(ptr, mem)
  4539  		return true
  4540  	}
  4541  	// match: (MOVVstorezeroidx (MOVVconst [c]) idx mem)
  4542  	// cond: is32Bit(c)
  4543  	// result: (MOVVstorezero [int32(c)] idx mem)
  4544  	for {
  4545  		if v_0.Op != OpLOONG64MOVVconst {
  4546  			break
  4547  		}
  4548  		c := auxIntToInt64(v_0.AuxInt)
  4549  		idx := v_1
  4550  		mem := v_2
  4551  		if !(is32Bit(c)) {
  4552  			break
  4553  		}
  4554  		v.reset(OpLOONG64MOVVstorezero)
  4555  		v.AuxInt = int32ToAuxInt(int32(c))
  4556  		v.AddArg2(idx, mem)
  4557  		return true
  4558  	}
  4559  	return false
  4560  }
  4561  func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
  4562  	v_1 := v.Args[1]
  4563  	v_0 := v.Args[0]
  4564  	b := v.Block
  4565  	config := b.Func.Config
  4566  	typ := &b.Func.Config.Types
  4567  	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
  4568  	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
  4569  	for {
  4570  		off := auxIntToInt32(v.AuxInt)
  4571  		sym := auxToSym(v.Aux)
  4572  		ptr := v_0
  4573  		if v_1.Op != OpLOONG64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
  4574  			break
  4575  		}
  4576  		val := v_1.Args[1]
  4577  		if ptr != v_1.Args[0] {
  4578  			break
  4579  		}
  4580  		v.reset(OpZeroExt32to64)
  4581  		v0 := b.NewValue0(v_1.Pos, OpLOONG64MOVWfpgp, typ.Float32)
  4582  		v0.AddArg(val)
  4583  		v.AddArg(v0)
  4584  		return true
  4585  	}
  4586  	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4587  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4588  	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
  4589  	for {
  4590  		off1 := auxIntToInt32(v.AuxInt)
  4591  		sym := auxToSym(v.Aux)
  4592  		if v_0.Op != OpLOONG64ADDVconst {
  4593  			break
  4594  		}
  4595  		off2 := auxIntToInt64(v_0.AuxInt)
  4596  		ptr := v_0.Args[0]
  4597  		mem := v_1
  4598  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4599  			break
  4600  		}
  4601  		v.reset(OpLOONG64MOVWUload)
  4602  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4603  		v.Aux = symToAux(sym)
  4604  		v.AddArg2(ptr, mem)
  4605  		return true
  4606  	}
  4607  	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4608  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4609  	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4610  	for {
  4611  		off1 := auxIntToInt32(v.AuxInt)
  4612  		sym1 := auxToSym(v.Aux)
  4613  		if v_0.Op != OpLOONG64MOVVaddr {
  4614  			break
  4615  		}
  4616  		off2 := auxIntToInt32(v_0.AuxInt)
  4617  		sym2 := auxToSym(v_0.Aux)
  4618  		ptr := v_0.Args[0]
  4619  		mem := v_1
  4620  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4621  			break
  4622  		}
  4623  		v.reset(OpLOONG64MOVWUload)
  4624  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4625  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4626  		v.AddArg2(ptr, mem)
  4627  		return true
  4628  	}
  4629  	// match: (MOVWUload [off] {sym} (ADDV ptr idx) mem)
  4630  	// cond: off == 0 && sym == nil
  4631  	// result: (MOVWUloadidx ptr idx mem)
  4632  	for {
  4633  		off := auxIntToInt32(v.AuxInt)
  4634  		sym := auxToSym(v.Aux)
  4635  		if v_0.Op != OpLOONG64ADDV {
  4636  			break
  4637  		}
  4638  		idx := v_0.Args[1]
  4639  		ptr := v_0.Args[0]
  4640  		mem := v_1
  4641  		if !(off == 0 && sym == nil) {
  4642  			break
  4643  		}
  4644  		v.reset(OpLOONG64MOVWUloadidx)
  4645  		v.AddArg3(ptr, idx, mem)
  4646  		return true
  4647  	}
  4648  	return false
  4649  }
  4650  func rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v *Value) bool {
  4651  	v_2 := v.Args[2]
  4652  	v_1 := v.Args[1]
  4653  	v_0 := v.Args[0]
  4654  	// match: (MOVWUloadidx ptr (MOVVconst [c]) mem)
  4655  	// cond: is32Bit(c)
  4656  	// result: (MOVWUload [int32(c)] ptr mem)
  4657  	for {
  4658  		ptr := v_0
  4659  		if v_1.Op != OpLOONG64MOVVconst {
  4660  			break
  4661  		}
  4662  		c := auxIntToInt64(v_1.AuxInt)
  4663  		mem := v_2
  4664  		if !(is32Bit(c)) {
  4665  			break
  4666  		}
  4667  		v.reset(OpLOONG64MOVWUload)
  4668  		v.AuxInt = int32ToAuxInt(int32(c))
  4669  		v.AddArg2(ptr, mem)
  4670  		return true
  4671  	}
  4672  	// match: (MOVWUloadidx (MOVVconst [c]) ptr mem)
  4673  	// cond: is32Bit(c)
  4674  	// result: (MOVWUload [int32(c)] ptr mem)
  4675  	for {
  4676  		if v_0.Op != OpLOONG64MOVVconst {
  4677  			break
  4678  		}
  4679  		c := auxIntToInt64(v_0.AuxInt)
  4680  		ptr := v_1
  4681  		mem := v_2
  4682  		if !(is32Bit(c)) {
  4683  			break
  4684  		}
  4685  		v.reset(OpLOONG64MOVWUload)
  4686  		v.AuxInt = int32ToAuxInt(int32(c))
  4687  		v.AddArg2(ptr, mem)
  4688  		return true
  4689  	}
  4690  	return false
  4691  }
  4692  func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
  4693  	v_0 := v.Args[0]
  4694  	// match: (MOVWUreg (SRLVconst [rc] x))
  4695  	// cond: rc < 32
  4696  	// result: (BSTRPICKV [rc + (31+rc)<<6] x)
  4697  	for {
  4698  		if v_0.Op != OpLOONG64SRLVconst {
  4699  			break
  4700  		}
  4701  		rc := auxIntToInt64(v_0.AuxInt)
  4702  		x := v_0.Args[0]
  4703  		if !(rc < 32) {
  4704  			break
  4705  		}
  4706  		v.reset(OpLOONG64BSTRPICKV)
  4707  		v.AuxInt = int64ToAuxInt(rc + (31+rc)<<6)
  4708  		v.AddArg(x)
  4709  		return true
  4710  	}
  4711  	// match: (MOVWUreg x:(MOVBUload _ _))
  4712  	// result: (MOVVreg x)
  4713  	for {
  4714  		x := v_0
  4715  		if x.Op != OpLOONG64MOVBUload {
  4716  			break
  4717  		}
  4718  		v.reset(OpLOONG64MOVVreg)
  4719  		v.AddArg(x)
  4720  		return true
  4721  	}
  4722  	// match: (MOVWUreg x:(MOVHUload _ _))
  4723  	// result: (MOVVreg x)
  4724  	for {
  4725  		x := v_0
  4726  		if x.Op != OpLOONG64MOVHUload {
  4727  			break
  4728  		}
  4729  		v.reset(OpLOONG64MOVVreg)
  4730  		v.AddArg(x)
  4731  		return true
  4732  	}
  4733  	// match: (MOVWUreg x:(MOVWUload _ _))
  4734  	// result: (MOVVreg x)
  4735  	for {
  4736  		x := v_0
  4737  		if x.Op != OpLOONG64MOVWUload {
  4738  			break
  4739  		}
  4740  		v.reset(OpLOONG64MOVVreg)
  4741  		v.AddArg(x)
  4742  		return true
  4743  	}
  4744  	// match: (MOVWUreg x:(MOVBUreg _))
  4745  	// result: (MOVVreg x)
  4746  	for {
  4747  		x := v_0
  4748  		if x.Op != OpLOONG64MOVBUreg {
  4749  			break
  4750  		}
  4751  		v.reset(OpLOONG64MOVVreg)
  4752  		v.AddArg(x)
  4753  		return true
  4754  	}
  4755  	// match: (MOVWUreg x:(MOVHUreg _))
  4756  	// result: (MOVVreg x)
  4757  	for {
  4758  		x := v_0
  4759  		if x.Op != OpLOONG64MOVHUreg {
  4760  			break
  4761  		}
  4762  		v.reset(OpLOONG64MOVVreg)
  4763  		v.AddArg(x)
  4764  		return true
  4765  	}
  4766  	// match: (MOVWUreg x:(MOVWUreg _))
  4767  	// result: (MOVVreg x)
  4768  	for {
  4769  		x := v_0
  4770  		if x.Op != OpLOONG64MOVWUreg {
  4771  			break
  4772  		}
  4773  		v.reset(OpLOONG64MOVVreg)
  4774  		v.AddArg(x)
  4775  		return true
  4776  	}
  4777  	// match: (MOVWUreg (SLLVconst [lc] x))
  4778  	// cond: lc >= 32
  4779  	// result: (MOVVconst [0])
  4780  	for {
  4781  		if v_0.Op != OpLOONG64SLLVconst {
  4782  			break
  4783  		}
  4784  		lc := auxIntToInt64(v_0.AuxInt)
  4785  		if !(lc >= 32) {
  4786  			break
  4787  		}
  4788  		v.reset(OpLOONG64MOVVconst)
  4789  		v.AuxInt = int64ToAuxInt(0)
  4790  		return true
  4791  	}
  4792  	// match: (MOVWUreg (MOVVconst [c]))
  4793  	// result: (MOVVconst [int64(uint32(c))])
  4794  	for {
  4795  		if v_0.Op != OpLOONG64MOVVconst {
  4796  			break
  4797  		}
  4798  		c := auxIntToInt64(v_0.AuxInt)
  4799  		v.reset(OpLOONG64MOVVconst)
  4800  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
  4801  		return true
  4802  	}
  4803  	return false
  4804  }
  4805  func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
  4806  	v_1 := v.Args[1]
  4807  	v_0 := v.Args[0]
  4808  	b := v.Block
  4809  	config := b.Func.Config
  4810  	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
  4811  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4812  	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
  4813  	for {
  4814  		off1 := auxIntToInt32(v.AuxInt)
  4815  		sym := auxToSym(v.Aux)
  4816  		if v_0.Op != OpLOONG64ADDVconst {
  4817  			break
  4818  		}
  4819  		off2 := auxIntToInt64(v_0.AuxInt)
  4820  		ptr := v_0.Args[0]
  4821  		mem := v_1
  4822  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4823  			break
  4824  		}
  4825  		v.reset(OpLOONG64MOVWload)
  4826  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4827  		v.Aux = symToAux(sym)
  4828  		v.AddArg2(ptr, mem)
  4829  		return true
  4830  	}
  4831  	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  4832  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  4833  	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  4834  	for {
  4835  		off1 := auxIntToInt32(v.AuxInt)
  4836  		sym1 := auxToSym(v.Aux)
  4837  		if v_0.Op != OpLOONG64MOVVaddr {
  4838  			break
  4839  		}
  4840  		off2 := auxIntToInt32(v_0.AuxInt)
  4841  		sym2 := auxToSym(v_0.Aux)
  4842  		ptr := v_0.Args[0]
  4843  		mem := v_1
  4844  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  4845  			break
  4846  		}
  4847  		v.reset(OpLOONG64MOVWload)
  4848  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4849  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4850  		v.AddArg2(ptr, mem)
  4851  		return true
  4852  	}
  4853  	// match: (MOVWload [off] {sym} (ADDV ptr idx) mem)
  4854  	// cond: off == 0 && sym == nil
  4855  	// result: (MOVWloadidx ptr idx mem)
  4856  	for {
  4857  		off := auxIntToInt32(v.AuxInt)
  4858  		sym := auxToSym(v.Aux)
  4859  		if v_0.Op != OpLOONG64ADDV {
  4860  			break
  4861  		}
  4862  		idx := v_0.Args[1]
  4863  		ptr := v_0.Args[0]
  4864  		mem := v_1
  4865  		if !(off == 0 && sym == nil) {
  4866  			break
  4867  		}
  4868  		v.reset(OpLOONG64MOVWloadidx)
  4869  		v.AddArg3(ptr, idx, mem)
  4870  		return true
  4871  	}
  4872  	return false
  4873  }
  4874  func rewriteValueLOONG64_OpLOONG64MOVWloadidx(v *Value) bool {
  4875  	v_2 := v.Args[2]
  4876  	v_1 := v.Args[1]
  4877  	v_0 := v.Args[0]
  4878  	// match: (MOVWloadidx ptr (MOVVconst [c]) mem)
  4879  	// cond: is32Bit(c)
  4880  	// result: (MOVWload [int32(c)] ptr mem)
  4881  	for {
  4882  		ptr := v_0
  4883  		if v_1.Op != OpLOONG64MOVVconst {
  4884  			break
  4885  		}
  4886  		c := auxIntToInt64(v_1.AuxInt)
  4887  		mem := v_2
  4888  		if !(is32Bit(c)) {
  4889  			break
  4890  		}
  4891  		v.reset(OpLOONG64MOVWload)
  4892  		v.AuxInt = int32ToAuxInt(int32(c))
  4893  		v.AddArg2(ptr, mem)
  4894  		return true
  4895  	}
  4896  	// match: (MOVWloadidx (MOVVconst [c]) ptr mem)
  4897  	// cond: is32Bit(c)
  4898  	// result: (MOVWload [int32(c)] ptr mem)
  4899  	for {
  4900  		if v_0.Op != OpLOONG64MOVVconst {
  4901  			break
  4902  		}
  4903  		c := auxIntToInt64(v_0.AuxInt)
  4904  		ptr := v_1
  4905  		mem := v_2
  4906  		if !(is32Bit(c)) {
  4907  			break
  4908  		}
  4909  		v.reset(OpLOONG64MOVWload)
  4910  		v.AuxInt = int32ToAuxInt(int32(c))
  4911  		v.AddArg2(ptr, mem)
  4912  		return true
  4913  	}
  4914  	return false
  4915  }
  4916  func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
  4917  	v_0 := v.Args[0]
  4918  	// match: (MOVWreg x:(MOVBload _ _))
  4919  	// result: (MOVVreg x)
  4920  	for {
  4921  		x := v_0
  4922  		if x.Op != OpLOONG64MOVBload {
  4923  			break
  4924  		}
  4925  		v.reset(OpLOONG64MOVVreg)
  4926  		v.AddArg(x)
  4927  		return true
  4928  	}
  4929  	// match: (MOVWreg x:(MOVBUload _ _))
  4930  	// result: (MOVVreg x)
  4931  	for {
  4932  		x := v_0
  4933  		if x.Op != OpLOONG64MOVBUload {
  4934  			break
  4935  		}
  4936  		v.reset(OpLOONG64MOVVreg)
  4937  		v.AddArg(x)
  4938  		return true
  4939  	}
  4940  	// match: (MOVWreg x:(MOVHload _ _))
  4941  	// result: (MOVVreg x)
  4942  	for {
  4943  		x := v_0
  4944  		if x.Op != OpLOONG64MOVHload {
  4945  			break
  4946  		}
  4947  		v.reset(OpLOONG64MOVVreg)
  4948  		v.AddArg(x)
  4949  		return true
  4950  	}
  4951  	// match: (MOVWreg x:(MOVHUload _ _))
  4952  	// result: (MOVVreg x)
  4953  	for {
  4954  		x := v_0
  4955  		if x.Op != OpLOONG64MOVHUload {
  4956  			break
  4957  		}
  4958  		v.reset(OpLOONG64MOVVreg)
  4959  		v.AddArg(x)
  4960  		return true
  4961  	}
  4962  	// match: (MOVWreg x:(MOVWload _ _))
  4963  	// result: (MOVVreg x)
  4964  	for {
  4965  		x := v_0
  4966  		if x.Op != OpLOONG64MOVWload {
  4967  			break
  4968  		}
  4969  		v.reset(OpLOONG64MOVVreg)
  4970  		v.AddArg(x)
  4971  		return true
  4972  	}
  4973  	// match: (MOVWreg x:(MOVBreg _))
  4974  	// result: (MOVVreg x)
  4975  	for {
  4976  		x := v_0
  4977  		if x.Op != OpLOONG64MOVBreg {
  4978  			break
  4979  		}
  4980  		v.reset(OpLOONG64MOVVreg)
  4981  		v.AddArg(x)
  4982  		return true
  4983  	}
  4984  	// match: (MOVWreg x:(MOVBUreg _))
  4985  	// result: (MOVVreg x)
  4986  	for {
  4987  		x := v_0
  4988  		if x.Op != OpLOONG64MOVBUreg {
  4989  			break
  4990  		}
  4991  		v.reset(OpLOONG64MOVVreg)
  4992  		v.AddArg(x)
  4993  		return true
  4994  	}
  4995  	// match: (MOVWreg x:(MOVHreg _))
  4996  	// result: (MOVVreg x)
  4997  	for {
  4998  		x := v_0
  4999  		if x.Op != OpLOONG64MOVHreg {
  5000  			break
  5001  		}
  5002  		v.reset(OpLOONG64MOVVreg)
  5003  		v.AddArg(x)
  5004  		return true
  5005  	}
  5006  	// match: (MOVWreg x:(MOVWreg _))
  5007  	// result: (MOVVreg x)
  5008  	for {
  5009  		x := v_0
  5010  		if x.Op != OpLOONG64MOVWreg {
  5011  			break
  5012  		}
  5013  		v.reset(OpLOONG64MOVVreg)
  5014  		v.AddArg(x)
  5015  		return true
  5016  	}
  5017  	// match: (MOVWreg (MOVVconst [c]))
  5018  	// result: (MOVVconst [int64(int32(c))])
  5019  	for {
  5020  		if v_0.Op != OpLOONG64MOVVconst {
  5021  			break
  5022  		}
  5023  		c := auxIntToInt64(v_0.AuxInt)
  5024  		v.reset(OpLOONG64MOVVconst)
  5025  		v.AuxInt = int64ToAuxInt(int64(int32(c)))
  5026  		return true
  5027  	}
  5028  	return false
  5029  }
  5030  func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
  5031  	v_2 := v.Args[2]
  5032  	v_1 := v.Args[1]
  5033  	v_0 := v.Args[0]
  5034  	b := v.Block
  5035  	config := b.Func.Config
  5036  	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
  5037  	// result: (MOVFstore [off] {sym} ptr val mem)
  5038  	for {
  5039  		off := auxIntToInt32(v.AuxInt)
  5040  		sym := auxToSym(v.Aux)
  5041  		ptr := v_0
  5042  		if v_1.Op != OpLOONG64MOVWfpgp {
  5043  			break
  5044  		}
  5045  		val := v_1.Args[0]
  5046  		mem := v_2
  5047  		v.reset(OpLOONG64MOVFstore)
  5048  		v.AuxInt = int32ToAuxInt(off)
  5049  		v.Aux = symToAux(sym)
  5050  		v.AddArg3(ptr, val, mem)
  5051  		return true
  5052  	}
  5053  	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
  5054  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5055  	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
  5056  	for {
  5057  		off1 := auxIntToInt32(v.AuxInt)
  5058  		sym := auxToSym(v.Aux)
  5059  		if v_0.Op != OpLOONG64ADDVconst {
  5060  			break
  5061  		}
  5062  		off2 := auxIntToInt64(v_0.AuxInt)
  5063  		ptr := v_0.Args[0]
  5064  		val := v_1
  5065  		mem := v_2
  5066  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5067  			break
  5068  		}
  5069  		v.reset(OpLOONG64MOVWstore)
  5070  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5071  		v.Aux = symToAux(sym)
  5072  		v.AddArg3(ptr, val, mem)
  5073  		return true
  5074  	}
  5075  	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
  5076  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5077  	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
  5078  	for {
  5079  		off1 := auxIntToInt32(v.AuxInt)
  5080  		sym1 := auxToSym(v.Aux)
  5081  		if v_0.Op != OpLOONG64MOVVaddr {
  5082  			break
  5083  		}
  5084  		off2 := auxIntToInt32(v_0.AuxInt)
  5085  		sym2 := auxToSym(v_0.Aux)
  5086  		ptr := v_0.Args[0]
  5087  		val := v_1
  5088  		mem := v_2
  5089  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5090  			break
  5091  		}
  5092  		v.reset(OpLOONG64MOVWstore)
  5093  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5094  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5095  		v.AddArg3(ptr, val, mem)
  5096  		return true
  5097  	}
  5098  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  5099  	// result: (MOVWstore [off] {sym} ptr x mem)
  5100  	for {
  5101  		off := auxIntToInt32(v.AuxInt)
  5102  		sym := auxToSym(v.Aux)
  5103  		ptr := v_0
  5104  		if v_1.Op != OpLOONG64MOVWreg {
  5105  			break
  5106  		}
  5107  		x := v_1.Args[0]
  5108  		mem := v_2
  5109  		v.reset(OpLOONG64MOVWstore)
  5110  		v.AuxInt = int32ToAuxInt(off)
  5111  		v.Aux = symToAux(sym)
  5112  		v.AddArg3(ptr, x, mem)
  5113  		return true
  5114  	}
  5115  	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
  5116  	// result: (MOVWstore [off] {sym} ptr x mem)
  5117  	for {
  5118  		off := auxIntToInt32(v.AuxInt)
  5119  		sym := auxToSym(v.Aux)
  5120  		ptr := v_0
  5121  		if v_1.Op != OpLOONG64MOVWUreg {
  5122  			break
  5123  		}
  5124  		x := v_1.Args[0]
  5125  		mem := v_2
  5126  		v.reset(OpLOONG64MOVWstore)
  5127  		v.AuxInt = int32ToAuxInt(off)
  5128  		v.Aux = symToAux(sym)
  5129  		v.AddArg3(ptr, x, mem)
  5130  		return true
  5131  	}
  5132  	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
  5133  	// result: (MOVWstorezero [off] {sym} ptr mem)
  5134  	for {
  5135  		off := auxIntToInt32(v.AuxInt)
  5136  		sym := auxToSym(v.Aux)
  5137  		ptr := v_0
  5138  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5139  			break
  5140  		}
  5141  		mem := v_2
  5142  		v.reset(OpLOONG64MOVWstorezero)
  5143  		v.AuxInt = int32ToAuxInt(off)
  5144  		v.Aux = symToAux(sym)
  5145  		v.AddArg2(ptr, mem)
  5146  		return true
  5147  	}
  5148  	// match: (MOVWstore [off] {sym} (ADDV ptr idx) val mem)
  5149  	// cond: off == 0 && sym == nil
  5150  	// result: (MOVWstoreidx ptr idx val mem)
  5151  	for {
  5152  		off := auxIntToInt32(v.AuxInt)
  5153  		sym := auxToSym(v.Aux)
  5154  		if v_0.Op != OpLOONG64ADDV {
  5155  			break
  5156  		}
  5157  		idx := v_0.Args[1]
  5158  		ptr := v_0.Args[0]
  5159  		val := v_1
  5160  		mem := v_2
  5161  		if !(off == 0 && sym == nil) {
  5162  			break
  5163  		}
  5164  		v.reset(OpLOONG64MOVWstoreidx)
  5165  		v.AddArg4(ptr, idx, val, mem)
  5166  		return true
  5167  	}
  5168  	return false
  5169  }
  5170  func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {
  5171  	v_3 := v.Args[3]
  5172  	v_2 := v.Args[2]
  5173  	v_1 := v.Args[1]
  5174  	v_0 := v.Args[0]
  5175  	// match: (MOVWstoreidx ptr (MOVVconst [c]) val mem)
  5176  	// cond: is32Bit(c)
  5177  	// result: (MOVWstore [int32(c)] ptr val mem)
  5178  	for {
  5179  		ptr := v_0
  5180  		if v_1.Op != OpLOONG64MOVVconst {
  5181  			break
  5182  		}
  5183  		c := auxIntToInt64(v_1.AuxInt)
  5184  		val := v_2
  5185  		mem := v_3
  5186  		if !(is32Bit(c)) {
  5187  			break
  5188  		}
  5189  		v.reset(OpLOONG64MOVWstore)
  5190  		v.AuxInt = int32ToAuxInt(int32(c))
  5191  		v.AddArg3(ptr, val, mem)
  5192  		return true
  5193  	}
  5194  	// match: (MOVWstoreidx (MOVVconst [c]) idx val mem)
  5195  	// cond: is32Bit(c)
  5196  	// result: (MOVWstore [int32(c)] idx val mem)
  5197  	for {
  5198  		if v_0.Op != OpLOONG64MOVVconst {
  5199  			break
  5200  		}
  5201  		c := auxIntToInt64(v_0.AuxInt)
  5202  		idx := v_1
  5203  		val := v_2
  5204  		mem := v_3
  5205  		if !(is32Bit(c)) {
  5206  			break
  5207  		}
  5208  		v.reset(OpLOONG64MOVWstore)
  5209  		v.AuxInt = int32ToAuxInt(int32(c))
  5210  		v.AddArg3(idx, val, mem)
  5211  		return true
  5212  	}
  5213  	// match: (MOVWstoreidx ptr idx (MOVVconst [0]) mem)
  5214  	// result: (MOVWstorezeroidx ptr idx mem)
  5215  	for {
  5216  		ptr := v_0
  5217  		idx := v_1
  5218  		if v_2.Op != OpLOONG64MOVVconst || auxIntToInt64(v_2.AuxInt) != 0 {
  5219  			break
  5220  		}
  5221  		mem := v_3
  5222  		v.reset(OpLOONG64MOVWstorezeroidx)
  5223  		v.AddArg3(ptr, idx, mem)
  5224  		return true
  5225  	}
  5226  	return false
  5227  }
  5228  func rewriteValueLOONG64_OpLOONG64MOVWstorezero(v *Value) bool {
  5229  	v_1 := v.Args[1]
  5230  	v_0 := v.Args[0]
  5231  	b := v.Block
  5232  	config := b.Func.Config
  5233  	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
  5234  	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5235  	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
  5236  	for {
  5237  		off1 := auxIntToInt32(v.AuxInt)
  5238  		sym := auxToSym(v.Aux)
  5239  		if v_0.Op != OpLOONG64ADDVconst {
  5240  			break
  5241  		}
  5242  		off2 := auxIntToInt64(v_0.AuxInt)
  5243  		ptr := v_0.Args[0]
  5244  		mem := v_1
  5245  		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5246  			break
  5247  		}
  5248  		v.reset(OpLOONG64MOVWstorezero)
  5249  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5250  		v.Aux = symToAux(sym)
  5251  		v.AddArg2(ptr, mem)
  5252  		return true
  5253  	}
  5254  	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
  5255  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
  5256  	// result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
  5257  	for {
  5258  		off1 := auxIntToInt32(v.AuxInt)
  5259  		sym1 := auxToSym(v.Aux)
  5260  		if v_0.Op != OpLOONG64MOVVaddr {
  5261  			break
  5262  		}
  5263  		off2 := auxIntToInt32(v_0.AuxInt)
  5264  		sym2 := auxToSym(v_0.Aux)
  5265  		ptr := v_0.Args[0]
  5266  		mem := v_1
  5267  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
  5268  			break
  5269  		}
  5270  		v.reset(OpLOONG64MOVWstorezero)
  5271  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5272  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5273  		v.AddArg2(ptr, mem)
  5274  		return true
  5275  	}
  5276  	// match: (MOVWstorezero [off] {sym} (ADDV ptr idx) mem)
  5277  	// cond: off == 0 && sym == nil
  5278  	// result: (MOVWstorezeroidx ptr idx mem)
  5279  	for {
  5280  		off := auxIntToInt32(v.AuxInt)
  5281  		sym := auxToSym(v.Aux)
  5282  		if v_0.Op != OpLOONG64ADDV {
  5283  			break
  5284  		}
  5285  		idx := v_0.Args[1]
  5286  		ptr := v_0.Args[0]
  5287  		mem := v_1
  5288  		if !(off == 0 && sym == nil) {
  5289  			break
  5290  		}
  5291  		v.reset(OpLOONG64MOVWstorezeroidx)
  5292  		v.AddArg3(ptr, idx, mem)
  5293  		return true
  5294  	}
  5295  	return false
  5296  }
  5297  func rewriteValueLOONG64_OpLOONG64MOVWstorezeroidx(v *Value) bool {
  5298  	v_2 := v.Args[2]
  5299  	v_1 := v.Args[1]
  5300  	v_0 := v.Args[0]
  5301  	// match: (MOVWstorezeroidx ptr (MOVVconst [c]) mem)
  5302  	// cond: is32Bit(c)
  5303  	// result: (MOVWstorezero [int32(c)] ptr mem)
  5304  	for {
  5305  		ptr := v_0
  5306  		if v_1.Op != OpLOONG64MOVVconst {
  5307  			break
  5308  		}
  5309  		c := auxIntToInt64(v_1.AuxInt)
  5310  		mem := v_2
  5311  		if !(is32Bit(c)) {
  5312  			break
  5313  		}
  5314  		v.reset(OpLOONG64MOVWstorezero)
  5315  		v.AuxInt = int32ToAuxInt(int32(c))
  5316  		v.AddArg2(ptr, mem)
  5317  		return true
  5318  	}
  5319  	// match: (MOVWstorezeroidx (MOVVconst [c]) idx mem)
  5320  	// cond: is32Bit(c)
  5321  	// result: (MOVWstorezero [int32(c)] idx mem)
  5322  	for {
  5323  		if v_0.Op != OpLOONG64MOVVconst {
  5324  			break
  5325  		}
  5326  		c := auxIntToInt64(v_0.AuxInt)
  5327  		idx := v_1
  5328  		mem := v_2
  5329  		if !(is32Bit(c)) {
  5330  			break
  5331  		}
  5332  		v.reset(OpLOONG64MOVWstorezero)
  5333  		v.AuxInt = int32ToAuxInt(int32(c))
  5334  		v.AddArg2(idx, mem)
  5335  		return true
  5336  	}
  5337  	return false
  5338  }
  5339  func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
  5340  	v_1 := v.Args[1]
  5341  	v_0 := v.Args[0]
  5342  	// match: (MULV x (MOVVconst [-1]))
  5343  	// result: (NEGV x)
  5344  	for {
  5345  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5346  			x := v_0
  5347  			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
  5348  				continue
  5349  			}
  5350  			v.reset(OpLOONG64NEGV)
  5351  			v.AddArg(x)
  5352  			return true
  5353  		}
  5354  		break
  5355  	}
  5356  	// match: (MULV _ (MOVVconst [0]))
  5357  	// result: (MOVVconst [0])
  5358  	for {
  5359  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5360  			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5361  				continue
  5362  			}
  5363  			v.reset(OpLOONG64MOVVconst)
  5364  			v.AuxInt = int64ToAuxInt(0)
  5365  			return true
  5366  		}
  5367  		break
  5368  	}
  5369  	// match: (MULV x (MOVVconst [1]))
  5370  	// result: x
  5371  	for {
  5372  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5373  			x := v_0
  5374  			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  5375  				continue
  5376  			}
  5377  			v.copyOf(x)
  5378  			return true
  5379  		}
  5380  		break
  5381  	}
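         	// Note: the rule below strength-reduces multiplication by a power of two into a
         	// left shift; for example x*8 (c == 8, log64(8) == 3) becomes (SLLVconst [3] x).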
  5382  	// match: (MULV x (MOVVconst [c]))
  5383  	// cond: isPowerOfTwo(c)
  5384  	// result: (SLLVconst [log64(c)] x)
  5385  	for {
  5386  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5387  			x := v_0
  5388  			if v_1.Op != OpLOONG64MOVVconst {
  5389  				continue
  5390  			}
  5391  			c := auxIntToInt64(v_1.AuxInt)
  5392  			if !(isPowerOfTwo(c)) {
  5393  				continue
  5394  			}
  5395  			v.reset(OpLOONG64SLLVconst)
  5396  			v.AuxInt = int64ToAuxInt(log64(c))
  5397  			v.AddArg(x)
  5398  			return true
  5399  		}
  5400  		break
  5401  	}
  5402  	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
  5403  	// result: (MOVVconst [c*d])
  5404  	for {
  5405  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5406  			if v_0.Op != OpLOONG64MOVVconst {
  5407  				continue
  5408  			}
  5409  			c := auxIntToInt64(v_0.AuxInt)
  5410  			if v_1.Op != OpLOONG64MOVVconst {
  5411  				continue
  5412  			}
  5413  			d := auxIntToInt64(v_1.AuxInt)
  5414  			v.reset(OpLOONG64MOVVconst)
  5415  			v.AuxInt = int64ToAuxInt(c * d)
  5416  			return true
  5417  		}
  5418  		break
  5419  	}
  5420  	return false
  5421  }
  5422  func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
  5423  	v_0 := v.Args[0]
  5424  	// match: (NEGV (MOVVconst [c]))
  5425  	// result: (MOVVconst [-c])
  5426  	for {
  5427  		if v_0.Op != OpLOONG64MOVVconst {
  5428  			break
  5429  		}
  5430  		c := auxIntToInt64(v_0.AuxInt)
  5431  		v.reset(OpLOONG64MOVVconst)
  5432  		v.AuxInt = int64ToAuxInt(-c)
  5433  		return true
  5434  	}
  5435  	return false
  5436  }
  5437  func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
  5438  	v_1 := v.Args[1]
  5439  	v_0 := v.Args[0]
  5440  	// match: (NOR x (MOVVconst [c]))
  5441  	// cond: is32Bit(c)
  5442  	// result: (NORconst [c] x)
  5443  	for {
  5444  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5445  			x := v_0
  5446  			if v_1.Op != OpLOONG64MOVVconst {
  5447  				continue
  5448  			}
  5449  			c := auxIntToInt64(v_1.AuxInt)
  5450  			if !(is32Bit(c)) {
  5451  				continue
  5452  			}
  5453  			v.reset(OpLOONG64NORconst)
  5454  			v.AuxInt = int64ToAuxInt(c)
  5455  			v.AddArg(x)
  5456  			return true
  5457  		}
  5458  		break
  5459  	}
  5460  	return false
  5461  }
  5462  func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
  5463  	v_0 := v.Args[0]
  5464  	// match: (NORconst [c] (MOVVconst [d]))
  5465  	// result: (MOVVconst [^(c|d)])
  5466  	for {
  5467  		c := auxIntToInt64(v.AuxInt)
  5468  		if v_0.Op != OpLOONG64MOVVconst {
  5469  			break
  5470  		}
  5471  		d := auxIntToInt64(v_0.AuxInt)
  5472  		v.reset(OpLOONG64MOVVconst)
  5473  		v.AuxInt = int64ToAuxInt(^(c | d))
  5474  		return true
  5475  	}
  5476  	return false
  5477  }
  5478  func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
  5479  	v_1 := v.Args[1]
  5480  	v_0 := v.Args[0]
  5481  	// match: (OR x (MOVVconst [c]))
  5482  	// cond: is32Bit(c)
  5483  	// result: (ORconst [c] x)
  5484  	for {
  5485  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5486  			x := v_0
  5487  			if v_1.Op != OpLOONG64MOVVconst {
  5488  				continue
  5489  			}
  5490  			c := auxIntToInt64(v_1.AuxInt)
  5491  			if !(is32Bit(c)) {
  5492  				continue
  5493  			}
  5494  			v.reset(OpLOONG64ORconst)
  5495  			v.AuxInt = int64ToAuxInt(c)
  5496  			v.AddArg(x)
  5497  			return true
  5498  		}
  5499  		break
  5500  	}
  5501  	// match: (OR x x)
  5502  	// result: x
  5503  	for {
  5504  		x := v_0
  5505  		if x != v_1 {
  5506  			break
  5507  		}
  5508  		v.copyOf(x)
  5509  		return true
  5510  	}
  5511  	return false
  5512  }
  5513  func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
  5514  	v_0 := v.Args[0]
  5515  	// match: (ORconst [0] x)
  5516  	// result: x
  5517  	for {
  5518  		if auxIntToInt64(v.AuxInt) != 0 {
  5519  			break
  5520  		}
  5521  		x := v_0
  5522  		v.copyOf(x)
  5523  		return true
  5524  	}
  5525  	// match: (ORconst [-1] _)
  5526  	// result: (MOVVconst [-1])
  5527  	for {
  5528  		if auxIntToInt64(v.AuxInt) != -1 {
  5529  			break
  5530  		}
  5531  		v.reset(OpLOONG64MOVVconst)
  5532  		v.AuxInt = int64ToAuxInt(-1)
  5533  		return true
  5534  	}
  5535  	// match: (ORconst [c] (MOVVconst [d]))
  5536  	// result: (MOVVconst [c|d])
  5537  	for {
  5538  		c := auxIntToInt64(v.AuxInt)
  5539  		if v_0.Op != OpLOONG64MOVVconst {
  5540  			break
  5541  		}
  5542  		d := auxIntToInt64(v_0.AuxInt)
  5543  		v.reset(OpLOONG64MOVVconst)
  5544  		v.AuxInt = int64ToAuxInt(c | d)
  5545  		return true
  5546  	}
  5547  	// match: (ORconst [c] (ORconst [d] x))
  5548  	// cond: is32Bit(c|d)
  5549  	// result: (ORconst [c|d] x)
  5550  	for {
  5551  		c := auxIntToInt64(v.AuxInt)
  5552  		if v_0.Op != OpLOONG64ORconst {
  5553  			break
  5554  		}
  5555  		d := auxIntToInt64(v_0.AuxInt)
  5556  		x := v_0.Args[0]
  5557  		if !(is32Bit(c | d)) {
  5558  			break
  5559  		}
  5560  		v.reset(OpLOONG64ORconst)
  5561  		v.AuxInt = int64ToAuxInt(c | d)
  5562  		v.AddArg(x)
  5563  		return true
  5564  	}
  5565  	return false
  5566  }
  5567  func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
  5568  	v_1 := v.Args[1]
  5569  	v_0 := v.Args[0]
  5570  	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
  5571  	// cond: d != 0
  5572  	// result: (MOVVconst [c%d])
  5573  	for {
  5574  		if v_0.Op != OpLOONG64MOVVconst {
  5575  			break
  5576  		}
  5577  		c := auxIntToInt64(v_0.AuxInt)
  5578  		if v_1.Op != OpLOONG64MOVVconst {
  5579  			break
  5580  		}
  5581  		d := auxIntToInt64(v_1.AuxInt)
  5582  		if !(d != 0) {
  5583  			break
  5584  		}
  5585  		v.reset(OpLOONG64MOVVconst)
  5586  		v.AuxInt = int64ToAuxInt(c % d)
  5587  		return true
  5588  	}
  5589  	return false
  5590  }
  5591  func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
  5592  	v_1 := v.Args[1]
  5593  	v_0 := v.Args[0]
  5594  	// match: (REMVU _ (MOVVconst [1]))
  5595  	// result: (MOVVconst [0])
  5596  	for {
  5597  		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
  5598  			break
  5599  		}
  5600  		v.reset(OpLOONG64MOVVconst)
  5601  		v.AuxInt = int64ToAuxInt(0)
  5602  		return true
  5603  	}
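         	// Note: the rule below reduces an unsigned remainder by a power of two to a mask;
         	// for example x % 8 becomes (ANDconst [7] x).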
  5604  	// match: (REMVU x (MOVVconst [c]))
  5605  	// cond: isPowerOfTwo(c)
  5606  	// result: (ANDconst [c-1] x)
  5607  	for {
  5608  		x := v_0
  5609  		if v_1.Op != OpLOONG64MOVVconst {
  5610  			break
  5611  		}
  5612  		c := auxIntToInt64(v_1.AuxInt)
  5613  		if !(isPowerOfTwo(c)) {
  5614  			break
  5615  		}
  5616  		v.reset(OpLOONG64ANDconst)
  5617  		v.AuxInt = int64ToAuxInt(c - 1)
  5618  		v.AddArg(x)
  5619  		return true
  5620  	}
  5621  	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
  5622  	// cond: d != 0
  5623  	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
  5624  	for {
  5625  		if v_0.Op != OpLOONG64MOVVconst {
  5626  			break
  5627  		}
  5628  		c := auxIntToInt64(v_0.AuxInt)
  5629  		if v_1.Op != OpLOONG64MOVVconst {
  5630  			break
  5631  		}
  5632  		d := auxIntToInt64(v_1.AuxInt)
  5633  		if !(d != 0) {
  5634  			break
  5635  		}
  5636  		v.reset(OpLOONG64MOVVconst)
  5637  		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
  5638  		return true
  5639  	}
  5640  	return false
  5641  }
  5642  func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
  5643  	v_1 := v.Args[1]
  5644  	v_0 := v.Args[0]
  5645  	// match: (ROTR x (MOVVconst [c]))
  5646  	// result: (ROTRconst x [c&31])
  5647  	for {
  5648  		x := v_0
  5649  		if v_1.Op != OpLOONG64MOVVconst {
  5650  			break
  5651  		}
  5652  		c := auxIntToInt64(v_1.AuxInt)
  5653  		v.reset(OpLOONG64ROTRconst)
  5654  		v.AuxInt = int64ToAuxInt(c & 31)
  5655  		v.AddArg(x)
  5656  		return true
  5657  	}
  5658  	return false
  5659  }
  5660  func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
  5661  	v_1 := v.Args[1]
  5662  	v_0 := v.Args[0]
  5663  	// match: (ROTRV x (MOVVconst [c]))
  5664  	// result: (ROTRVconst x [c&63])
  5665  	for {
  5666  		x := v_0
  5667  		if v_1.Op != OpLOONG64MOVVconst {
  5668  			break
  5669  		}
  5670  		c := auxIntToInt64(v_1.AuxInt)
  5671  		v.reset(OpLOONG64ROTRVconst)
  5672  		v.AuxInt = int64ToAuxInt(c & 63)
  5673  		v.AddArg(x)
  5674  		return true
  5675  	}
  5676  	return false
  5677  }
  5678  func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
  5679  	v_1 := v.Args[1]
  5680  	v_0 := v.Args[0]
  5681  	b := v.Block
  5682  	typ := &b.Func.Config.Types
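         	// Note: the rule below restates the comparison c > -(x-d), i.e. c > d-x, as
         	// x > d-c, which puts the constant on the right-hand side; it only applies
         	// when d-c still fits in 32 bits.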
  5683  	// match: (SGT (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
  5684  	// cond: is32Bit(d-c)
  5685  	// result: (SGT x (MOVVconst [d-c]))
  5686  	for {
  5687  		if v_0.Op != OpLOONG64MOVVconst {
  5688  			break
  5689  		}
  5690  		c := auxIntToInt64(v_0.AuxInt)
  5691  		if v_1.Op != OpLOONG64NEGV {
  5692  			break
  5693  		}
  5694  		v_1_0 := v_1.Args[0]
  5695  		if v_1_0.Op != OpLOONG64SUBVconst {
  5696  			break
  5697  		}
  5698  		d := auxIntToInt64(v_1_0.AuxInt)
  5699  		x := v_1_0.Args[0]
  5700  		if !(is32Bit(d - c)) {
  5701  			break
  5702  		}
  5703  		v.reset(OpLOONG64SGT)
  5704  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  5705  		v0.AuxInt = int64ToAuxInt(d - c)
  5706  		v.AddArg2(x, v0)
  5707  		return true
  5708  	}
  5709  	// match: (SGT (MOVVconst [c]) x)
  5710  	// cond: is32Bit(c)
  5711  	// result: (SGTconst [c] x)
  5712  	for {
  5713  		if v_0.Op != OpLOONG64MOVVconst {
  5714  			break
  5715  		}
  5716  		c := auxIntToInt64(v_0.AuxInt)
  5717  		x := v_1
  5718  		if !(is32Bit(c)) {
  5719  			break
  5720  		}
  5721  		v.reset(OpLOONG64SGTconst)
  5722  		v.AuxInt = int64ToAuxInt(c)
  5723  		v.AddArg(x)
  5724  		return true
  5725  	}
  5726  	// match: (SGT x x)
  5727  	// result: (MOVVconst [0])
  5728  	for {
  5729  		x := v_0
  5730  		if x != v_1 {
  5731  			break
  5732  		}
  5733  		v.reset(OpLOONG64MOVVconst)
  5734  		v.AuxInt = int64ToAuxInt(0)
  5735  		return true
  5736  	}
  5737  	return false
  5738  }
  5739  func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
  5740  	v_1 := v.Args[1]
  5741  	v_0 := v.Args[0]
  5742  	// match: (SGTU (MOVVconst [c]) x)
  5743  	// cond: is32Bit(c)
  5744  	// result: (SGTUconst [c] x)
  5745  	for {
  5746  		if v_0.Op != OpLOONG64MOVVconst {
  5747  			break
  5748  		}
  5749  		c := auxIntToInt64(v_0.AuxInt)
  5750  		x := v_1
  5751  		if !(is32Bit(c)) {
  5752  			break
  5753  		}
  5754  		v.reset(OpLOONG64SGTUconst)
  5755  		v.AuxInt = int64ToAuxInt(c)
  5756  		v.AddArg(x)
  5757  		return true
  5758  	}
  5759  	// match: (SGTU x x)
  5760  	// result: (MOVVconst [0])
  5761  	for {
  5762  		x := v_0
  5763  		if x != v_1 {
  5764  			break
  5765  		}
  5766  		v.reset(OpLOONG64MOVVconst)
  5767  		v.AuxInt = int64ToAuxInt(0)
  5768  		return true
  5769  	}
  5770  	return false
  5771  }
  5772  func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
  5773  	v_0 := v.Args[0]
  5774  	// match: (SGTUconst [c] (MOVVconst [d]))
  5775  	// cond: uint64(c)>uint64(d)
  5776  	// result: (MOVVconst [1])
  5777  	for {
  5778  		c := auxIntToInt64(v.AuxInt)
  5779  		if v_0.Op != OpLOONG64MOVVconst {
  5780  			break
  5781  		}
  5782  		d := auxIntToInt64(v_0.AuxInt)
  5783  		if !(uint64(c) > uint64(d)) {
  5784  			break
  5785  		}
  5786  		v.reset(OpLOONG64MOVVconst)
  5787  		v.AuxInt = int64ToAuxInt(1)
  5788  		return true
  5789  	}
  5790  	// match: (SGTUconst [c] (MOVVconst [d]))
  5791  	// cond: uint64(c)<=uint64(d)
  5792  	// result: (MOVVconst [0])
  5793  	for {
  5794  		c := auxIntToInt64(v.AuxInt)
  5795  		if v_0.Op != OpLOONG64MOVVconst {
  5796  			break
  5797  		}
  5798  		d := auxIntToInt64(v_0.AuxInt)
  5799  		if !(uint64(c) <= uint64(d)) {
  5800  			break
  5801  		}
  5802  		v.reset(OpLOONG64MOVVconst)
  5803  		v.AuxInt = int64ToAuxInt(0)
  5804  		return true
  5805  	}
  5806  	// match: (SGTUconst [c] (MOVBUreg _))
  5807  	// cond: 0xff < uint64(c)
  5808  	// result: (MOVVconst [1])
  5809  	for {
  5810  		c := auxIntToInt64(v.AuxInt)
  5811  		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
  5812  			break
  5813  		}
  5814  		v.reset(OpLOONG64MOVVconst)
  5815  		v.AuxInt = int64ToAuxInt(1)
  5816  		return true
  5817  	}
  5818  	// match: (SGTUconst [c] (MOVHUreg _))
  5819  	// cond: 0xffff < uint64(c)
  5820  	// result: (MOVVconst [1])
  5821  	for {
  5822  		c := auxIntToInt64(v.AuxInt)
  5823  		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
  5824  			break
  5825  		}
  5826  		v.reset(OpLOONG64MOVVconst)
  5827  		v.AuxInt = int64ToAuxInt(1)
  5828  		return true
  5829  	}
  5830  	// match: (SGTUconst [c] (ANDconst [m] _))
  5831  	// cond: uint64(m) < uint64(c)
  5832  	// result: (MOVVconst [1])
  5833  	for {
  5834  		c := auxIntToInt64(v.AuxInt)
  5835  		if v_0.Op != OpLOONG64ANDconst {
  5836  			break
  5837  		}
  5838  		m := auxIntToInt64(v_0.AuxInt)
  5839  		if !(uint64(m) < uint64(c)) {
  5840  			break
  5841  		}
  5842  		v.reset(OpLOONG64MOVVconst)
  5843  		v.AuxInt = int64ToAuxInt(1)
  5844  		return true
  5845  	}
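         	// Note: the rule below uses the fact that a value logically shifted right by d
         	// (0 < d <= 63) is at most 0xffffffffffffffff>>d, so any unsigned constant c
         	// above that bound makes the comparison unconditionally true.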
  5846  	// match: (SGTUconst [c] (SRLVconst _ [d]))
  5847  	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
  5848  	// result: (MOVVconst [1])
  5849  	for {
  5850  		c := auxIntToInt64(v.AuxInt)
  5851  		if v_0.Op != OpLOONG64SRLVconst {
  5852  			break
  5853  		}
  5854  		d := auxIntToInt64(v_0.AuxInt)
  5855  		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
  5856  			break
  5857  		}
  5858  		v.reset(OpLOONG64MOVVconst)
  5859  		v.AuxInt = int64ToAuxInt(1)
  5860  		return true
  5861  	}
  5862  	return false
  5863  }
  5864  func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
  5865  	v_0 := v.Args[0]
  5866  	// match: (SGTconst [c] (MOVVconst [d]))
  5867  	// cond: c>d
  5868  	// result: (MOVVconst [1])
  5869  	for {
  5870  		c := auxIntToInt64(v.AuxInt)
  5871  		if v_0.Op != OpLOONG64MOVVconst {
  5872  			break
  5873  		}
  5874  		d := auxIntToInt64(v_0.AuxInt)
  5875  		if !(c > d) {
  5876  			break
  5877  		}
  5878  		v.reset(OpLOONG64MOVVconst)
  5879  		v.AuxInt = int64ToAuxInt(1)
  5880  		return true
  5881  	}
  5882  	// match: (SGTconst [c] (MOVVconst [d]))
  5883  	// cond: c<=d
  5884  	// result: (MOVVconst [0])
  5885  	for {
  5886  		c := auxIntToInt64(v.AuxInt)
  5887  		if v_0.Op != OpLOONG64MOVVconst {
  5888  			break
  5889  		}
  5890  		d := auxIntToInt64(v_0.AuxInt)
  5891  		if !(c <= d) {
  5892  			break
  5893  		}
  5894  		v.reset(OpLOONG64MOVVconst)
  5895  		v.AuxInt = int64ToAuxInt(0)
  5896  		return true
  5897  	}
  5898  	// match: (SGTconst [c] (MOVBreg _))
  5899  	// cond: 0x7f < c
  5900  	// result: (MOVVconst [1])
  5901  	for {
  5902  		c := auxIntToInt64(v.AuxInt)
  5903  		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
  5904  			break
  5905  		}
  5906  		v.reset(OpLOONG64MOVVconst)
  5907  		v.AuxInt = int64ToAuxInt(1)
  5908  		return true
  5909  	}
  5910  	// match: (SGTconst [c] (MOVBreg _))
  5911  	// cond: c <= -0x80
  5912  	// result: (MOVVconst [0])
  5913  	for {
  5914  		c := auxIntToInt64(v.AuxInt)
  5915  		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
  5916  			break
  5917  		}
  5918  		v.reset(OpLOONG64MOVVconst)
  5919  		v.AuxInt = int64ToAuxInt(0)
  5920  		return true
  5921  	}
  5922  	// match: (SGTconst [c] (MOVBUreg _))
  5923  	// cond: 0xff < c
  5924  	// result: (MOVVconst [1])
  5925  	for {
  5926  		c := auxIntToInt64(v.AuxInt)
  5927  		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
  5928  			break
  5929  		}
  5930  		v.reset(OpLOONG64MOVVconst)
  5931  		v.AuxInt = int64ToAuxInt(1)
  5932  		return true
  5933  	}
  5934  	// match: (SGTconst [c] (MOVBUreg _))
  5935  	// cond: c < 0
  5936  	// result: (MOVVconst [0])
  5937  	for {
  5938  		c := auxIntToInt64(v.AuxInt)
  5939  		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
  5940  			break
  5941  		}
  5942  		v.reset(OpLOONG64MOVVconst)
  5943  		v.AuxInt = int64ToAuxInt(0)
  5944  		return true
  5945  	}
  5946  	// match: (SGTconst [c] (MOVHreg _))
  5947  	// cond: 0x7fff < c
  5948  	// result: (MOVVconst [1])
  5949  	for {
  5950  		c := auxIntToInt64(v.AuxInt)
  5951  		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
  5952  			break
  5953  		}
  5954  		v.reset(OpLOONG64MOVVconst)
  5955  		v.AuxInt = int64ToAuxInt(1)
  5956  		return true
  5957  	}
  5958  	// match: (SGTconst [c] (MOVHreg _))
  5959  	// cond: c <= -0x8000
  5960  	// result: (MOVVconst [0])
  5961  	for {
  5962  		c := auxIntToInt64(v.AuxInt)
  5963  		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
  5964  			break
  5965  		}
  5966  		v.reset(OpLOONG64MOVVconst)
  5967  		v.AuxInt = int64ToAuxInt(0)
  5968  		return true
  5969  	}
  5970  	// match: (SGTconst [c] (MOVHUreg _))
  5971  	// cond: 0xffff < c
  5972  	// result: (MOVVconst [1])
  5973  	for {
  5974  		c := auxIntToInt64(v.AuxInt)
  5975  		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
  5976  			break
  5977  		}
  5978  		v.reset(OpLOONG64MOVVconst)
  5979  		v.AuxInt = int64ToAuxInt(1)
  5980  		return true
  5981  	}
  5982  	// match: (SGTconst [c] (MOVHUreg _))
  5983  	// cond: c < 0
  5984  	// result: (MOVVconst [0])
  5985  	for {
  5986  		c := auxIntToInt64(v.AuxInt)
  5987  		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
  5988  			break
  5989  		}
  5990  		v.reset(OpLOONG64MOVVconst)
  5991  		v.AuxInt = int64ToAuxInt(0)
  5992  		return true
  5993  	}
  5994  	// match: (SGTconst [c] (MOVWUreg _))
  5995  	// cond: c < 0
  5996  	// result: (MOVVconst [0])
  5997  	for {
  5998  		c := auxIntToInt64(v.AuxInt)
  5999  		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
  6000  			break
  6001  		}
  6002  		v.reset(OpLOONG64MOVVconst)
  6003  		v.AuxInt = int64ToAuxInt(0)
  6004  		return true
  6005  	}
  6006  	// match: (SGTconst [c] (ANDconst [m] _))
  6007  	// cond: 0 <= m && m < c
  6008  	// result: (MOVVconst [1])
  6009  	for {
  6010  		c := auxIntToInt64(v.AuxInt)
  6011  		if v_0.Op != OpLOONG64ANDconst {
  6012  			break
  6013  		}
  6014  		m := auxIntToInt64(v_0.AuxInt)
  6015  		if !(0 <= m && m < c) {
  6016  			break
  6017  		}
  6018  		v.reset(OpLOONG64MOVVconst)
  6019  		v.AuxInt = int64ToAuxInt(1)
  6020  		return true
  6021  	}
  6022  	// match: (SGTconst [c] (SRLVconst _ [d]))
  6023  	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
  6024  	// result: (MOVVconst [1])
  6025  	for {
  6026  		c := auxIntToInt64(v.AuxInt)
  6027  		if v_0.Op != OpLOONG64SRLVconst {
  6028  			break
  6029  		}
  6030  		d := auxIntToInt64(v_0.AuxInt)
  6031  		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
  6032  			break
  6033  		}
  6034  		v.reset(OpLOONG64MOVVconst)
  6035  		v.AuxInt = int64ToAuxInt(1)
  6036  		return true
  6037  	}
  6038  	return false
  6039  }
  6040  func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
  6041  	v_1 := v.Args[1]
  6042  	v_0 := v.Args[0]
  6043  	// match: (SLL _ (MOVVconst [c]))
  6044  	// cond: uint64(c)>=32
  6045  	// result: (MOVVconst [0])
  6046  	for {
  6047  		if v_1.Op != OpLOONG64MOVVconst {
  6048  			break
  6049  		}
  6050  		c := auxIntToInt64(v_1.AuxInt)
  6051  		if !(uint64(c) >= 32) {
  6052  			break
  6053  		}
  6054  		v.reset(OpLOONG64MOVVconst)
  6055  		v.AuxInt = int64ToAuxInt(0)
  6056  		return true
  6057  	}
  6058  	// match: (SLL x (MOVVconst [c]))
  6059  	// cond: uint64(c) >=0 && uint64(c) <=31
  6060  	// result: (SLLconst x [c])
  6061  	for {
  6062  		x := v_0
  6063  		if v_1.Op != OpLOONG64MOVVconst {
  6064  			break
  6065  		}
  6066  		c := auxIntToInt64(v_1.AuxInt)
  6067  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6068  			break
  6069  		}
  6070  		v.reset(OpLOONG64SLLconst)
  6071  		v.AuxInt = int64ToAuxInt(c)
  6072  		v.AddArg(x)
  6073  		return true
  6074  	}
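         	// Note: the rule below drops a redundant mask on the shift amount; the 32-bit
         	// shift looks only at the low 5 bits of its count, so (y&31) and y shift
         	// identically.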
  6075  	// match: (SLL x (ANDconst [31] y))
  6076  	// result: (SLL x y)
  6077  	for {
  6078  		x := v_0
  6079  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6080  			break
  6081  		}
  6082  		y := v_1.Args[0]
  6083  		v.reset(OpLOONG64SLL)
  6084  		v.AddArg2(x, y)
  6085  		return true
  6086  	}
  6087  	return false
  6088  }
  6089  func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
  6090  	v_1 := v.Args[1]
  6091  	v_0 := v.Args[0]
  6092  	// match: (SLLV _ (MOVVconst [c]))
  6093  	// cond: uint64(c)>=64
  6094  	// result: (MOVVconst [0])
  6095  	for {
  6096  		if v_1.Op != OpLOONG64MOVVconst {
  6097  			break
  6098  		}
  6099  		c := auxIntToInt64(v_1.AuxInt)
  6100  		if !(uint64(c) >= 64) {
  6101  			break
  6102  		}
  6103  		v.reset(OpLOONG64MOVVconst)
  6104  		v.AuxInt = int64ToAuxInt(0)
  6105  		return true
  6106  	}
  6107  	// match: (SLLV x (MOVVconst [c]))
  6108  	// result: (SLLVconst x [c])
  6109  	for {
  6110  		x := v_0
  6111  		if v_1.Op != OpLOONG64MOVVconst {
  6112  			break
  6113  		}
  6114  		c := auxIntToInt64(v_1.AuxInt)
  6115  		v.reset(OpLOONG64SLLVconst)
  6116  		v.AuxInt = int64ToAuxInt(c)
  6117  		v.AddArg(x)
  6118  		return true
  6119  	}
  6120  	// match: (SLLV x (ANDconst [63] y))
  6121  	// result: (SLLV x y)
  6122  	for {
  6123  		x := v_0
  6124  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6125  			break
  6126  		}
  6127  		y := v_1.Args[0]
  6128  		v.reset(OpLOONG64SLLV)
  6129  		v.AddArg2(x, y)
  6130  		return true
  6131  	}
  6132  	return false
  6133  }
  6134  func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
  6135  	v_0 := v.Args[0]
  6136  	// match: (SLLVconst [c] (MOVVconst [d]))
  6137  	// result: (MOVVconst [d<<uint64(c)])
  6138  	for {
  6139  		c := auxIntToInt64(v.AuxInt)
  6140  		if v_0.Op != OpLOONG64MOVVconst {
  6141  			break
  6142  		}
  6143  		d := auxIntToInt64(v_0.AuxInt)
  6144  		v.reset(OpLOONG64MOVVconst)
  6145  		v.AuxInt = int64ToAuxInt(d << uint64(c))
  6146  		return true
  6147  	}
  6148  	return false
  6149  }
  6150  func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
  6151  	v_1 := v.Args[1]
  6152  	v_0 := v.Args[0]
  6153  	// match: (SRA x (MOVVconst [c]))
  6154  	// cond: uint64(c)>=32
  6155  	// result: (SRAconst x [31])
  6156  	for {
  6157  		x := v_0
  6158  		if v_1.Op != OpLOONG64MOVVconst {
  6159  			break
  6160  		}
  6161  		c := auxIntToInt64(v_1.AuxInt)
  6162  		if !(uint64(c) >= 32) {
  6163  			break
  6164  		}
  6165  		v.reset(OpLOONG64SRAconst)
  6166  		v.AuxInt = int64ToAuxInt(31)
  6167  		v.AddArg(x)
  6168  		return true
  6169  	}
  6170  	// match: (SRA x (MOVVconst [c]))
  6171  	// cond: uint64(c) >=0 && uint64(c) <=31
  6172  	// result: (SRAconst x [c])
  6173  	for {
  6174  		x := v_0
  6175  		if v_1.Op != OpLOONG64MOVVconst {
  6176  			break
  6177  		}
  6178  		c := auxIntToInt64(v_1.AuxInt)
  6179  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6180  			break
  6181  		}
  6182  		v.reset(OpLOONG64SRAconst)
  6183  		v.AuxInt = int64ToAuxInt(c)
  6184  		v.AddArg(x)
  6185  		return true
  6186  	}
  6187  	// match: (SRA x (ANDconst [31] y))
  6188  	// result: (SRA x y)
  6189  	for {
  6190  		x := v_0
  6191  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6192  			break
  6193  		}
  6194  		y := v_1.Args[0]
  6195  		v.reset(OpLOONG64SRA)
  6196  		v.AddArg2(x, y)
  6197  		return true
  6198  	}
  6199  	return false
  6200  }
  6201  func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
  6202  	v_1 := v.Args[1]
  6203  	v_0 := v.Args[0]
  6204  	// match: (SRAV x (MOVVconst [c]))
  6205  	// cond: uint64(c)>=64
  6206  	// result: (SRAVconst x [63])
  6207  	for {
  6208  		x := v_0
  6209  		if v_1.Op != OpLOONG64MOVVconst {
  6210  			break
  6211  		}
  6212  		c := auxIntToInt64(v_1.AuxInt)
  6213  		if !(uint64(c) >= 64) {
  6214  			break
  6215  		}
  6216  		v.reset(OpLOONG64SRAVconst)
  6217  		v.AuxInt = int64ToAuxInt(63)
  6218  		v.AddArg(x)
  6219  		return true
  6220  	}
  6221  	// match: (SRAV x (MOVVconst [c]))
  6222  	// result: (SRAVconst x [c])
  6223  	for {
  6224  		x := v_0
  6225  		if v_1.Op != OpLOONG64MOVVconst {
  6226  			break
  6227  		}
  6228  		c := auxIntToInt64(v_1.AuxInt)
  6229  		v.reset(OpLOONG64SRAVconst)
  6230  		v.AuxInt = int64ToAuxInt(c)
  6231  		v.AddArg(x)
  6232  		return true
  6233  	}
  6234  	// match: (SRAV x (ANDconst [63] y))
  6235  	// result: (SRAV x y)
  6236  	for {
  6237  		x := v_0
  6238  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6239  			break
  6240  		}
  6241  		y := v_1.Args[0]
  6242  		v.reset(OpLOONG64SRAV)
  6243  		v.AddArg2(x, y)
  6244  		return true
  6245  	}
  6246  	return false
  6247  }
  6248  func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
  6249  	v_0 := v.Args[0]
  6250  	b := v.Block
  6251  	// match: (SRAVconst [rc] (MOVWreg y))
  6252  	// cond: rc >= 0 && rc <= 31
  6253  	// result: (SRAconst [int64(rc)] y)
  6254  	for {
  6255  		rc := auxIntToInt64(v.AuxInt)
  6256  		if v_0.Op != OpLOONG64MOVWreg {
  6257  			break
  6258  		}
  6259  		y := v_0.Args[0]
  6260  		if !(rc >= 0 && rc <= 31) {
  6261  			break
  6262  		}
  6263  		v.reset(OpLOONG64SRAconst)
  6264  		v.AuxInt = int64ToAuxInt(int64(rc))
  6265  		v.AddArg(y)
  6266  		return true
  6267  	}
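         	// Note: for a sign-extended 8-bit (or, in the following rule, 16-bit) value, an
         	// arithmetic right shift by at least its width leaves only copies of the sign bit.
         	// The rules below make that explicit: move the sign bit up to bit 63 with
         	// SLLVconst [56] (or [48]), then broadcast it with an arithmetic shift right by 63,
         	// producing 0 or -1.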
  6268  	// match: (SRAVconst <t> [rc] (MOVBreg y))
  6269  	// cond: rc >= 8
  6270  	// result: (SRAVconst [63] (SLLVconst <t> [56] y))
  6271  	for {
  6272  		t := v.Type
  6273  		rc := auxIntToInt64(v.AuxInt)
  6274  		if v_0.Op != OpLOONG64MOVBreg {
  6275  			break
  6276  		}
  6277  		y := v_0.Args[0]
  6278  		if !(rc >= 8) {
  6279  			break
  6280  		}
  6281  		v.reset(OpLOONG64SRAVconst)
  6282  		v.AuxInt = int64ToAuxInt(63)
  6283  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  6284  		v0.AuxInt = int64ToAuxInt(56)
  6285  		v0.AddArg(y)
  6286  		v.AddArg(v0)
  6287  		return true
  6288  	}
  6289  	// match: (SRAVconst <t> [rc] (MOVHreg y))
  6290  	// cond: rc >= 16
  6291  	// result: (SRAVconst [63] (SLLVconst <t> [48] y))
  6292  	for {
  6293  		t := v.Type
  6294  		rc := auxIntToInt64(v.AuxInt)
  6295  		if v_0.Op != OpLOONG64MOVHreg {
  6296  			break
  6297  		}
  6298  		y := v_0.Args[0]
  6299  		if !(rc >= 16) {
  6300  			break
  6301  		}
  6302  		v.reset(OpLOONG64SRAVconst)
  6303  		v.AuxInt = int64ToAuxInt(63)
  6304  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  6305  		v0.AuxInt = int64ToAuxInt(48)
  6306  		v0.AddArg(y)
  6307  		v.AddArg(v0)
  6308  		return true
  6309  	}
  6310  	// match: (SRAVconst <t> [rc] (MOVWreg y))
  6311  	// cond: rc >= 32
  6312  	// result: (SRAconst [31] y)
  6313  	for {
  6314  		rc := auxIntToInt64(v.AuxInt)
  6315  		if v_0.Op != OpLOONG64MOVWreg {
  6316  			break
  6317  		}
  6318  		y := v_0.Args[0]
  6319  		if !(rc >= 32) {
  6320  			break
  6321  		}
  6322  		v.reset(OpLOONG64SRAconst)
  6323  		v.AuxInt = int64ToAuxInt(31)
  6324  		v.AddArg(y)
  6325  		return true
  6326  	}
  6327  	// match: (SRAVconst [c] (MOVVconst [d]))
  6328  	// result: (MOVVconst [d>>uint64(c)])
  6329  	for {
  6330  		c := auxIntToInt64(v.AuxInt)
  6331  		if v_0.Op != OpLOONG64MOVVconst {
  6332  			break
  6333  		}
  6334  		d := auxIntToInt64(v_0.AuxInt)
  6335  		v.reset(OpLOONG64MOVVconst)
  6336  		v.AuxInt = int64ToAuxInt(d >> uint64(c))
  6337  		return true
  6338  	}
  6339  	return false
  6340  }
  6341  func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
  6342  	v_1 := v.Args[1]
  6343  	v_0 := v.Args[0]
  6344  	// match: (SRL _ (MOVVconst [c]))
  6345  	// cond: uint64(c)>=32
  6346  	// result: (MOVVconst [0])
  6347  	for {
  6348  		if v_1.Op != OpLOONG64MOVVconst {
  6349  			break
  6350  		}
  6351  		c := auxIntToInt64(v_1.AuxInt)
  6352  		if !(uint64(c) >= 32) {
  6353  			break
  6354  		}
  6355  		v.reset(OpLOONG64MOVVconst)
  6356  		v.AuxInt = int64ToAuxInt(0)
  6357  		return true
  6358  	}
  6359  	// match: (SRL x (MOVVconst [c]))
  6360  	// cond: uint64(c) >=0 && uint64(c) <=31
  6361  	// result: (SRLconst x [c])
  6362  	for {
  6363  		x := v_0
  6364  		if v_1.Op != OpLOONG64MOVVconst {
  6365  			break
  6366  		}
  6367  		c := auxIntToInt64(v_1.AuxInt)
  6368  		if !(uint64(c) >= 0 && uint64(c) <= 31) {
  6369  			break
  6370  		}
  6371  		v.reset(OpLOONG64SRLconst)
  6372  		v.AuxInt = int64ToAuxInt(c)
  6373  		v.AddArg(x)
  6374  		return true
  6375  	}
  6376  	// match: (SRL x (ANDconst [31] y))
  6377  	// result: (SRL x y)
  6378  	for {
  6379  		x := v_0
  6380  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
  6381  			break
  6382  		}
  6383  		y := v_1.Args[0]
  6384  		v.reset(OpLOONG64SRL)
  6385  		v.AddArg2(x, y)
  6386  		return true
  6387  	}
  6388  	return false
  6389  }
  6390  func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
  6391  	v_1 := v.Args[1]
  6392  	v_0 := v.Args[0]
  6393  	// match: (SRLV _ (MOVVconst [c]))
  6394  	// cond: uint64(c)>=64
  6395  	// result: (MOVVconst [0])
  6396  	for {
  6397  		if v_1.Op != OpLOONG64MOVVconst {
  6398  			break
  6399  		}
  6400  		c := auxIntToInt64(v_1.AuxInt)
  6401  		if !(uint64(c) >= 64) {
  6402  			break
  6403  		}
  6404  		v.reset(OpLOONG64MOVVconst)
  6405  		v.AuxInt = int64ToAuxInt(0)
  6406  		return true
  6407  	}
  6408  	// match: (SRLV x (MOVVconst [c]))
  6409  	// result: (SRLVconst x [c])
  6410  	for {
  6411  		x := v_0
  6412  		if v_1.Op != OpLOONG64MOVVconst {
  6413  			break
  6414  		}
  6415  		c := auxIntToInt64(v_1.AuxInt)
  6416  		v.reset(OpLOONG64SRLVconst)
  6417  		v.AuxInt = int64ToAuxInt(c)
  6418  		v.AddArg(x)
  6419  		return true
  6420  	}
  6421  	// match: (SRLV x (ANDconst [63] y))
  6422  	// result: (SRLV x y)
  6423  	for {
  6424  		x := v_0
  6425  		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
  6426  			break
  6427  		}
  6428  		y := v_1.Args[0]
  6429  		v.reset(OpLOONG64SRLV)
  6430  		v.AddArg2(x, y)
  6431  		return true
  6432  	}
  6433  	return false
  6434  }
  6435  func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
  6436  	v_0 := v.Args[0]
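         	// Note: the BSTRPICKV rules below pack the extracted bit field into the AuxInt as
         	// msb<<6 + lsb. A logical (x<<lc)>>rc selects bits rc-lc .. 63-lc of x, hence the
         	// encoding rc-lc + ((64-lc)-1)<<6; e.g. lc=16, rc=48 selects bits 32..47.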
  6437  	// match: (SRLVconst [rc] (SLLVconst [lc] x))
  6438  	// cond: lc <= rc
  6439  	// result: (BSTRPICKV [rc-lc + ((64-lc)-1)<<6] x)
  6440  	for {
  6441  		rc := auxIntToInt64(v.AuxInt)
  6442  		if v_0.Op != OpLOONG64SLLVconst {
  6443  			break
  6444  		}
  6445  		lc := auxIntToInt64(v_0.AuxInt)
  6446  		x := v_0.Args[0]
  6447  		if !(lc <= rc) {
  6448  			break
  6449  		}
  6450  		v.reset(OpLOONG64BSTRPICKV)
  6451  		v.AuxInt = int64ToAuxInt(rc - lc + ((64-lc)-1)<<6)
  6452  		v.AddArg(x)
  6453  		return true
  6454  	}
  6455  	// match: (SRLVconst [rc] (MOVWUreg x))
  6456  	// cond: rc < 32
  6457  	// result: (BSTRPICKV [rc + 31<<6] x)
  6458  	for {
  6459  		rc := auxIntToInt64(v.AuxInt)
  6460  		if v_0.Op != OpLOONG64MOVWUreg {
  6461  			break
  6462  		}
  6463  		x := v_0.Args[0]
  6464  		if !(rc < 32) {
  6465  			break
  6466  		}
  6467  		v.reset(OpLOONG64BSTRPICKV)
  6468  		v.AuxInt = int64ToAuxInt(rc + 31<<6)
  6469  		v.AddArg(x)
  6470  		return true
  6471  	}
  6472  	// match: (SRLVconst [rc] (MOVHUreg x))
  6473  	// cond: rc < 16
  6474  	// result: (BSTRPICKV [rc + 15<<6] x)
  6475  	for {
  6476  		rc := auxIntToInt64(v.AuxInt)
  6477  		if v_0.Op != OpLOONG64MOVHUreg {
  6478  			break
  6479  		}
  6480  		x := v_0.Args[0]
  6481  		if !(rc < 16) {
  6482  			break
  6483  		}
  6484  		v.reset(OpLOONG64BSTRPICKV)
  6485  		v.AuxInt = int64ToAuxInt(rc + 15<<6)
  6486  		v.AddArg(x)
  6487  		return true
  6488  	}
  6489  	// match: (SRLVconst [rc] (MOVBUreg x))
  6490  	// cond: rc < 8
  6491  	// result: (BSTRPICKV [rc + 7<<6] x)
  6492  	for {
  6493  		rc := auxIntToInt64(v.AuxInt)
  6494  		if v_0.Op != OpLOONG64MOVBUreg {
  6495  			break
  6496  		}
  6497  		x := v_0.Args[0]
  6498  		if !(rc < 8) {
  6499  			break
  6500  		}
  6501  		v.reset(OpLOONG64BSTRPICKV)
  6502  		v.AuxInt = int64ToAuxInt(rc + 7<<6)
  6503  		v.AddArg(x)
  6504  		return true
  6505  	}
  6506  	// match: (SRLVconst [rc] (MOVWUreg y))
  6507  	// cond: rc >= 0 && rc <= 31
  6508  	// result: (SRLconst [int64(rc)] y)
  6509  	for {
  6510  		rc := auxIntToInt64(v.AuxInt)
  6511  		if v_0.Op != OpLOONG64MOVWUreg {
  6512  			break
  6513  		}
  6514  		y := v_0.Args[0]
  6515  		if !(rc >= 0 && rc <= 31) {
  6516  			break
  6517  		}
  6518  		v.reset(OpLOONG64SRLconst)
  6519  		v.AuxInt = int64ToAuxInt(int64(rc))
  6520  		v.AddArg(y)
  6521  		return true
  6522  	}
  6523  	// match: (SRLVconst [rc] (MOVWUreg x))
  6524  	// cond: rc >= 32
  6525  	// result: (MOVVconst [0])
  6526  	for {
  6527  		rc := auxIntToInt64(v.AuxInt)
  6528  		if v_0.Op != OpLOONG64MOVWUreg {
  6529  			break
  6530  		}
  6531  		if !(rc >= 32) {
  6532  			break
  6533  		}
  6534  		v.reset(OpLOONG64MOVVconst)
  6535  		v.AuxInt = int64ToAuxInt(0)
  6536  		return true
  6537  	}
  6538  	// match: (SRLVconst [rc] (MOVHUreg x))
  6539  	// cond: rc >= 16
  6540  	// result: (MOVVconst [0])
  6541  	for {
  6542  		rc := auxIntToInt64(v.AuxInt)
  6543  		if v_0.Op != OpLOONG64MOVHUreg {
  6544  			break
  6545  		}
  6546  		if !(rc >= 16) {
  6547  			break
  6548  		}
  6549  		v.reset(OpLOONG64MOVVconst)
  6550  		v.AuxInt = int64ToAuxInt(0)
  6551  		return true
  6552  	}
  6553  	// match: (SRLVconst [rc] (MOVBUreg x))
  6554  	// cond: rc >= 8
  6555  	// result: (MOVVconst [0])
  6556  	for {
  6557  		rc := auxIntToInt64(v.AuxInt)
  6558  		if v_0.Op != OpLOONG64MOVBUreg {
  6559  			break
  6560  		}
  6561  		if !(rc >= 8) {
  6562  			break
  6563  		}
  6564  		v.reset(OpLOONG64MOVVconst)
  6565  		v.AuxInt = int64ToAuxInt(0)
  6566  		return true
  6567  	}
  6568  	// match: (SRLVconst [c] (MOVVconst [d]))
  6569  	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
  6570  	for {
  6571  		c := auxIntToInt64(v.AuxInt)
  6572  		if v_0.Op != OpLOONG64MOVVconst {
  6573  			break
  6574  		}
  6575  		d := auxIntToInt64(v_0.AuxInt)
  6576  		v.reset(OpLOONG64MOVVconst)
  6577  		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
  6578  		return true
  6579  	}
  6580  	return false
  6581  }
  6582  func rewriteValueLOONG64_OpLOONG64SUBD(v *Value) bool {
  6583  	v_1 := v.Args[1]
  6584  	v_0 := v.Args[0]
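         	// Note: when FMA contraction is allowed for this function (useFMA), the four rules
         	// below fuse a multiply feeding the subtraction: x*y - z becomes FMSUBD, z - x*y
         	// becomes FNMSUBD, z - (-(x*y)) becomes FMADDD, and -(x*y) - z becomes FNMADDD.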
  6585  	// match: (SUBD (MULD x y) z)
  6586  	// cond: z.Block.Func.useFMA(v)
  6587  	// result: (FMSUBD x y z)
  6588  	for {
  6589  		if v_0.Op != OpLOONG64MULD {
  6590  			break
  6591  		}
  6592  		y := v_0.Args[1]
  6593  		x := v_0.Args[0]
  6594  		z := v_1
  6595  		if !(z.Block.Func.useFMA(v)) {
  6596  			break
  6597  		}
  6598  		v.reset(OpLOONG64FMSUBD)
  6599  		v.AddArg3(x, y, z)
  6600  		return true
  6601  	}
  6602  	// match: (SUBD z (MULD x y))
  6603  	// cond: z.Block.Func.useFMA(v)
  6604  	// result: (FNMSUBD x y z)
  6605  	for {
  6606  		z := v_0
  6607  		if v_1.Op != OpLOONG64MULD {
  6608  			break
  6609  		}
  6610  		y := v_1.Args[1]
  6611  		x := v_1.Args[0]
  6612  		if !(z.Block.Func.useFMA(v)) {
  6613  			break
  6614  		}
  6615  		v.reset(OpLOONG64FNMSUBD)
  6616  		v.AddArg3(x, y, z)
  6617  		return true
  6618  	}
  6619  	// match: (SUBD z (NEGD (MULD x y)))
  6620  	// cond: z.Block.Func.useFMA(v)
  6621  	// result: (FMADDD x y z)
  6622  	for {
  6623  		z := v_0
  6624  		if v_1.Op != OpLOONG64NEGD {
  6625  			break
  6626  		}
  6627  		v_1_0 := v_1.Args[0]
  6628  		if v_1_0.Op != OpLOONG64MULD {
  6629  			break
  6630  		}
  6631  		y := v_1_0.Args[1]
  6632  		x := v_1_0.Args[0]
  6633  		if !(z.Block.Func.useFMA(v)) {
  6634  			break
  6635  		}
  6636  		v.reset(OpLOONG64FMADDD)
  6637  		v.AddArg3(x, y, z)
  6638  		return true
  6639  	}
  6640  	// match: (SUBD (NEGD (MULD x y)) z)
  6641  	// cond: z.Block.Func.useFMA(v)
  6642  	// result: (FNMADDD x y z)
  6643  	for {
  6644  		if v_0.Op != OpLOONG64NEGD {
  6645  			break
  6646  		}
  6647  		v_0_0 := v_0.Args[0]
  6648  		if v_0_0.Op != OpLOONG64MULD {
  6649  			break
  6650  		}
  6651  		y := v_0_0.Args[1]
  6652  		x := v_0_0.Args[0]
  6653  		z := v_1
  6654  		if !(z.Block.Func.useFMA(v)) {
  6655  			break
  6656  		}
  6657  		v.reset(OpLOONG64FNMADDD)
  6658  		v.AddArg3(x, y, z)
  6659  		return true
  6660  	}
  6661  	return false
  6662  }
  6663  func rewriteValueLOONG64_OpLOONG64SUBF(v *Value) bool {
  6664  	v_1 := v.Args[1]
  6665  	v_0 := v.Args[0]
  6666  	// match: (SUBF (MULF x y) z)
  6667  	// cond: z.Block.Func.useFMA(v)
  6668  	// result: (FMSUBF x y z)
  6669  	for {
  6670  		if v_0.Op != OpLOONG64MULF {
  6671  			break
  6672  		}
  6673  		y := v_0.Args[1]
  6674  		x := v_0.Args[0]
  6675  		z := v_1
  6676  		if !(z.Block.Func.useFMA(v)) {
  6677  			break
  6678  		}
  6679  		v.reset(OpLOONG64FMSUBF)
  6680  		v.AddArg3(x, y, z)
  6681  		return true
  6682  	}
  6683  	// match: (SUBF z (MULF x y))
  6684  	// cond: z.Block.Func.useFMA(v)
  6685  	// result: (FNMSUBF x y z)
  6686  	for {
  6687  		z := v_0
  6688  		if v_1.Op != OpLOONG64MULF {
  6689  			break
  6690  		}
  6691  		y := v_1.Args[1]
  6692  		x := v_1.Args[0]
  6693  		if !(z.Block.Func.useFMA(v)) {
  6694  			break
  6695  		}
  6696  		v.reset(OpLOONG64FNMSUBF)
  6697  		v.AddArg3(x, y, z)
  6698  		return true
  6699  	}
  6700  	// match: (SUBF z (NEGF (MULF x y)))
  6701  	// cond: z.Block.Func.useFMA(v)
  6702  	// result: (FMADDF x y z)
  6703  	for {
  6704  		z := v_0
  6705  		if v_1.Op != OpLOONG64NEGF {
  6706  			break
  6707  		}
  6708  		v_1_0 := v_1.Args[0]
  6709  		if v_1_0.Op != OpLOONG64MULF {
  6710  			break
  6711  		}
  6712  		y := v_1_0.Args[1]
  6713  		x := v_1_0.Args[0]
  6714  		if !(z.Block.Func.useFMA(v)) {
  6715  			break
  6716  		}
  6717  		v.reset(OpLOONG64FMADDF)
  6718  		v.AddArg3(x, y, z)
  6719  		return true
  6720  	}
  6721  	// match: (SUBF (NEGF (MULF x y)) z)
  6722  	// cond: z.Block.Func.useFMA(v)
  6723  	// result: (FNMADDF x y z)
  6724  	for {
  6725  		if v_0.Op != OpLOONG64NEGF {
  6726  			break
  6727  		}
  6728  		v_0_0 := v_0.Args[0]
  6729  		if v_0_0.Op != OpLOONG64MULF {
  6730  			break
  6731  		}
  6732  		y := v_0_0.Args[1]
  6733  		x := v_0_0.Args[0]
  6734  		z := v_1
  6735  		if !(z.Block.Func.useFMA(v)) {
  6736  			break
  6737  		}
  6738  		v.reset(OpLOONG64FNMADDF)
  6739  		v.AddArg3(x, y, z)
  6740  		return true
  6741  	}
  6742  	return false
  6743  }
  6744  func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
  6745  	v_1 := v.Args[1]
  6746  	v_0 := v.Args[0]
  6747  	// match: (SUBV x (MOVVconst [c]))
  6748  	// cond: is32Bit(c)
  6749  	// result: (SUBVconst [c] x)
  6750  	for {
  6751  		x := v_0
  6752  		if v_1.Op != OpLOONG64MOVVconst {
  6753  			break
  6754  		}
  6755  		c := auxIntToInt64(v_1.AuxInt)
  6756  		if !(is32Bit(c)) {
  6757  			break
  6758  		}
  6759  		v.reset(OpLOONG64SUBVconst)
  6760  		v.AuxInt = int64ToAuxInt(c)
  6761  		v.AddArg(x)
  6762  		return true
  6763  	}
  6764  	// match: (SUBV x x)
  6765  	// result: (MOVVconst [0])
  6766  	for {
  6767  		x := v_0
  6768  		if x != v_1 {
  6769  			break
  6770  		}
  6771  		v.reset(OpLOONG64MOVVconst)
  6772  		v.AuxInt = int64ToAuxInt(0)
  6773  		return true
  6774  	}
  6775  	// match: (SUBV (MOVVconst [0]) x)
  6776  	// result: (NEGV x)
  6777  	for {
  6778  		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
  6779  			break
  6780  		}
  6781  		x := v_1
  6782  		v.reset(OpLOONG64NEGV)
  6783  		v.AddArg(x)
  6784  		return true
  6785  	}
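         	// Note: the rule below folds c - (-(x-d)), i.e. (c-d) + x, into a single add with a
         	// constant: (ADDVconst [c-d] x).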
  6786  	// match: (SUBV (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
  6787  	// result: (ADDVconst [c-d] x)
  6788  	for {
  6789  		if v_0.Op != OpLOONG64MOVVconst {
  6790  			break
  6791  		}
  6792  		c := auxIntToInt64(v_0.AuxInt)
  6793  		if v_1.Op != OpLOONG64NEGV {
  6794  			break
  6795  		}
  6796  		v_1_0 := v_1.Args[0]
  6797  		if v_1_0.Op != OpLOONG64SUBVconst {
  6798  			break
  6799  		}
  6800  		d := auxIntToInt64(v_1_0.AuxInt)
  6801  		x := v_1_0.Args[0]
  6802  		v.reset(OpLOONG64ADDVconst)
  6803  		v.AuxInt = int64ToAuxInt(c - d)
  6804  		v.AddArg(x)
  6805  		return true
  6806  	}
  6807  	return false
  6808  }
  6809  func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
  6810  	v_0 := v.Args[0]
  6811  	// match: (SUBVconst [0] x)
  6812  	// result: x
  6813  	for {
  6814  		if auxIntToInt64(v.AuxInt) != 0 {
  6815  			break
  6816  		}
  6817  		x := v_0
  6818  		v.copyOf(x)
  6819  		return true
  6820  	}
  6821  	// match: (SUBVconst [c] (MOVVconst [d]))
  6822  	// result: (MOVVconst [d-c])
  6823  	for {
  6824  		c := auxIntToInt64(v.AuxInt)
  6825  		if v_0.Op != OpLOONG64MOVVconst {
  6826  			break
  6827  		}
  6828  		d := auxIntToInt64(v_0.AuxInt)
  6829  		v.reset(OpLOONG64MOVVconst)
  6830  		v.AuxInt = int64ToAuxInt(d - c)
  6831  		return true
  6832  	}
  6833  	// match: (SUBVconst [c] (SUBVconst [d] x))
  6834  	// cond: is32Bit(-c-d)
  6835  	// result: (ADDVconst [-c-d] x)
  6836  	for {
  6837  		c := auxIntToInt64(v.AuxInt)
  6838  		if v_0.Op != OpLOONG64SUBVconst {
  6839  			break
  6840  		}
  6841  		d := auxIntToInt64(v_0.AuxInt)
  6842  		x := v_0.Args[0]
  6843  		if !(is32Bit(-c - d)) {
  6844  			break
  6845  		}
  6846  		v.reset(OpLOONG64ADDVconst)
  6847  		v.AuxInt = int64ToAuxInt(-c - d)
  6848  		v.AddArg(x)
  6849  		return true
  6850  	}
  6851  	// match: (SUBVconst [c] (ADDVconst [d] x))
  6852  	// cond: is32Bit(-c+d)
  6853  	// result: (ADDVconst [-c+d] x)
  6854  	for {
  6855  		c := auxIntToInt64(v.AuxInt)
  6856  		if v_0.Op != OpLOONG64ADDVconst {
  6857  			break
  6858  		}
  6859  		d := auxIntToInt64(v_0.AuxInt)
  6860  		x := v_0.Args[0]
  6861  		if !(is32Bit(-c + d)) {
  6862  			break
  6863  		}
  6864  		v.reset(OpLOONG64ADDVconst)
  6865  		v.AuxInt = int64ToAuxInt(-c + d)
  6866  		v.AddArg(x)
  6867  		return true
  6868  	}
  6869  	return false
  6870  }
  6871  func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
  6872  	v_1 := v.Args[1]
  6873  	v_0 := v.Args[0]
  6874  	// match: (XOR x (MOVVconst [c]))
  6875  	// cond: is32Bit(c)
  6876  	// result: (XORconst [c] x)
  6877  	for {
  6878  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  6879  			x := v_0
  6880  			if v_1.Op != OpLOONG64MOVVconst {
  6881  				continue
  6882  			}
  6883  			c := auxIntToInt64(v_1.AuxInt)
  6884  			if !(is32Bit(c)) {
  6885  				continue
  6886  			}
  6887  			v.reset(OpLOONG64XORconst)
  6888  			v.AuxInt = int64ToAuxInt(c)
  6889  			v.AddArg(x)
  6890  			return true
  6891  		}
  6892  		break
  6893  	}
  6894  	// match: (XOR x x)
  6895  	// result: (MOVVconst [0])
  6896  	for {
  6897  		x := v_0
  6898  		if x != v_1 {
  6899  			break
  6900  		}
  6901  		v.reset(OpLOONG64MOVVconst)
  6902  		v.AuxInt = int64ToAuxInt(0)
  6903  		return true
  6904  	}
  6905  	return false
  6906  }
  6907  func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
  6908  	v_0 := v.Args[0]
  6909  	// match: (XORconst [0] x)
  6910  	// result: x
  6911  	for {
  6912  		if auxIntToInt64(v.AuxInt) != 0 {
  6913  			break
  6914  		}
  6915  		x := v_0
  6916  		v.copyOf(x)
  6917  		return true
  6918  	}
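         	// Note: the rule below expresses bitwise NOT as NOR with zero, using the identity
         	// ^x == ^(x|0), which maps onto the NORconst form.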
  6919  	// match: (XORconst [-1] x)
  6920  	// result: (NORconst [0] x)
  6921  	for {
  6922  		if auxIntToInt64(v.AuxInt) != -1 {
  6923  			break
  6924  		}
  6925  		x := v_0
  6926  		v.reset(OpLOONG64NORconst)
  6927  		v.AuxInt = int64ToAuxInt(0)
  6928  		v.AddArg(x)
  6929  		return true
  6930  	}
  6931  	// match: (XORconst [c] (MOVVconst [d]))
  6932  	// result: (MOVVconst [c^d])
  6933  	for {
  6934  		c := auxIntToInt64(v.AuxInt)
  6935  		if v_0.Op != OpLOONG64MOVVconst {
  6936  			break
  6937  		}
  6938  		d := auxIntToInt64(v_0.AuxInt)
  6939  		v.reset(OpLOONG64MOVVconst)
  6940  		v.AuxInt = int64ToAuxInt(c ^ d)
  6941  		return true
  6942  	}
  6943  	// match: (XORconst [c] (XORconst [d] x))
  6944  	// cond: is32Bit(c^d)
  6945  	// result: (XORconst [c^d] x)
  6946  	for {
  6947  		c := auxIntToInt64(v.AuxInt)
  6948  		if v_0.Op != OpLOONG64XORconst {
  6949  			break
  6950  		}
  6951  		d := auxIntToInt64(v_0.AuxInt)
  6952  		x := v_0.Args[0]
  6953  		if !(is32Bit(c ^ d)) {
  6954  			break
  6955  		}
  6956  		v.reset(OpLOONG64XORconst)
  6957  		v.AuxInt = int64ToAuxInt(c ^ d)
  6958  		v.AddArg(x)
  6959  		return true
  6960  	}
  6961  	return false
  6962  }
  6963  func rewriteValueLOONG64_OpLeq16(v *Value) bool {
  6964  	v_1 := v.Args[1]
  6965  	v_0 := v.Args[0]
  6966  	b := v.Block
  6967  	typ := &b.Func.Config.Types
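         	// Note: these rules only have SGT/SGTU ("set on greater than") comparisons to work
         	// with, so x <= y is computed as the boolean negation of x > y: the 0/1 result of
         	// SGT is XORed with 1.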
  6968  	// match: (Leq16 x y)
  6969  	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
  6970  	for {
  6971  		x := v_0
  6972  		y := v_1
  6973  		v.reset(OpLOONG64XOR)
  6974  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  6975  		v0.AuxInt = int64ToAuxInt(1)
  6976  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  6977  		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  6978  		v2.AddArg(x)
  6979  		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  6980  		v3.AddArg(y)
  6981  		v1.AddArg2(v2, v3)
  6982  		v.AddArg2(v0, v1)
  6983  		return true
  6984  	}
  6985  }
  6986  func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
  6987  	v_1 := v.Args[1]
  6988  	v_0 := v.Args[0]
  6989  	b := v.Block
  6990  	typ := &b.Func.Config.Types
  6991  	// match: (Leq16U x y)
  6992  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
  6993  	for {
  6994  		x := v_0
  6995  		y := v_1
  6996  		v.reset(OpLOONG64XOR)
  6997  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  6998  		v0.AuxInt = int64ToAuxInt(1)
  6999  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7000  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7001  		v2.AddArg(x)
  7002  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7003  		v3.AddArg(y)
  7004  		v1.AddArg2(v2, v3)
  7005  		v.AddArg2(v0, v1)
  7006  		return true
  7007  	}
  7008  }
  7009  func rewriteValueLOONG64_OpLeq32(v *Value) bool {
  7010  	v_1 := v.Args[1]
  7011  	v_0 := v.Args[0]
  7012  	b := v.Block
  7013  	typ := &b.Func.Config.Types
  7014  	// match: (Leq32 x y)
  7015  	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
  7016  	for {
  7017  		x := v_0
  7018  		y := v_1
  7019  		v.reset(OpLOONG64XOR)
  7020  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7021  		v0.AuxInt = int64ToAuxInt(1)
  7022  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7023  		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7024  		v2.AddArg(x)
  7025  		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7026  		v3.AddArg(y)
  7027  		v1.AddArg2(v2, v3)
  7028  		v.AddArg2(v0, v1)
  7029  		return true
  7030  	}
  7031  }
  7032  func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
  7033  	v_1 := v.Args[1]
  7034  	v_0 := v.Args[0]
  7035  	b := v.Block
  7036  	// match: (Leq32F x y)
  7037  	// result: (FPFlagTrue (CMPGEF y x))
  7038  	for {
  7039  		x := v_0
  7040  		y := v_1
  7041  		v.reset(OpLOONG64FPFlagTrue)
  7042  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
  7043  		v0.AddArg2(y, x)
  7044  		v.AddArg(v0)
  7045  		return true
  7046  	}
  7047  }
  7048  func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
  7049  	v_1 := v.Args[1]
  7050  	v_0 := v.Args[0]
  7051  	b := v.Block
  7052  	typ := &b.Func.Config.Types
  7053  	// match: (Leq32U x y)
  7054  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
  7055  	for {
  7056  		x := v_0
  7057  		y := v_1
  7058  		v.reset(OpLOONG64XOR)
  7059  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7060  		v0.AuxInt = int64ToAuxInt(1)
  7061  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7062  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7063  		v2.AddArg(x)
  7064  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7065  		v3.AddArg(y)
  7066  		v1.AddArg2(v2, v3)
  7067  		v.AddArg2(v0, v1)
  7068  		return true
  7069  	}
  7070  }
  7071  func rewriteValueLOONG64_OpLeq64(v *Value) bool {
  7072  	v_1 := v.Args[1]
  7073  	v_0 := v.Args[0]
  7074  	b := v.Block
  7075  	typ := &b.Func.Config.Types
  7076  	// match: (Leq64 x y)
  7077  	// result: (XOR (MOVVconst [1]) (SGT x y))
  7078  	for {
  7079  		x := v_0
  7080  		y := v_1
  7081  		v.reset(OpLOONG64XOR)
  7082  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7083  		v0.AuxInt = int64ToAuxInt(1)
  7084  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7085  		v1.AddArg2(x, y)
  7086  		v.AddArg2(v0, v1)
  7087  		return true
  7088  	}
  7089  }
  7090  func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
  7091  	v_1 := v.Args[1]
  7092  	v_0 := v.Args[0]
  7093  	b := v.Block
  7094  	// match: (Leq64F x y)
  7095  	// result: (FPFlagTrue (CMPGED y x))
  7096  	for {
  7097  		x := v_0
  7098  		y := v_1
  7099  		v.reset(OpLOONG64FPFlagTrue)
  7100  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
  7101  		v0.AddArg2(y, x)
  7102  		v.AddArg(v0)
  7103  		return true
  7104  	}
  7105  }
  7106  func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
  7107  	v_1 := v.Args[1]
  7108  	v_0 := v.Args[0]
  7109  	b := v.Block
  7110  	typ := &b.Func.Config.Types
  7111  	// match: (Leq64U x y)
  7112  	// result: (XOR (MOVVconst [1]) (SGTU x y))
  7113  	for {
  7114  		x := v_0
  7115  		y := v_1
  7116  		v.reset(OpLOONG64XOR)
  7117  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7118  		v0.AuxInt = int64ToAuxInt(1)
  7119  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7120  		v1.AddArg2(x, y)
  7121  		v.AddArg2(v0, v1)
  7122  		return true
  7123  	}
  7124  }
  7125  func rewriteValueLOONG64_OpLeq8(v *Value) bool {
  7126  	v_1 := v.Args[1]
  7127  	v_0 := v.Args[0]
  7128  	b := v.Block
  7129  	typ := &b.Func.Config.Types
  7130  	// match: (Leq8 x y)
  7131  	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
  7132  	for {
  7133  		x := v_0
  7134  		y := v_1
  7135  		v.reset(OpLOONG64XOR)
  7136  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7137  		v0.AuxInt = int64ToAuxInt(1)
  7138  		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
  7139  		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7140  		v2.AddArg(x)
  7141  		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7142  		v3.AddArg(y)
  7143  		v1.AddArg2(v2, v3)
  7144  		v.AddArg2(v0, v1)
  7145  		return true
  7146  	}
  7147  }
  7148  func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
  7149  	v_1 := v.Args[1]
  7150  	v_0 := v.Args[0]
  7151  	b := v.Block
  7152  	typ := &b.Func.Config.Types
  7153  	// match: (Leq8U x y)
  7154  	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
  7155  	for {
  7156  		x := v_0
  7157  		y := v_1
  7158  		v.reset(OpLOONG64XOR)
  7159  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7160  		v0.AuxInt = int64ToAuxInt(1)
  7161  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7162  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7163  		v2.AddArg(x)
  7164  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7165  		v3.AddArg(y)
  7166  		v1.AddArg2(v2, v3)
  7167  		v.AddArg2(v0, v1)
  7168  		return true
  7169  	}
  7170  }
  7171  func rewriteValueLOONG64_OpLess16(v *Value) bool {
  7172  	v_1 := v.Args[1]
  7173  	v_0 := v.Args[0]
  7174  	b := v.Block
  7175  	typ := &b.Func.Config.Types
  7176  	// match: (Less16 x y)
  7177  	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
  7178  	for {
  7179  		x := v_0
  7180  		y := v_1
  7181  		v.reset(OpLOONG64SGT)
  7182  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7183  		v0.AddArg(y)
  7184  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  7185  		v1.AddArg(x)
  7186  		v.AddArg2(v0, v1)
  7187  		return true
  7188  	}
  7189  }
  7190  func rewriteValueLOONG64_OpLess16U(v *Value) bool {
  7191  	v_1 := v.Args[1]
  7192  	v_0 := v.Args[0]
  7193  	b := v.Block
  7194  	typ := &b.Func.Config.Types
  7195  	// match: (Less16U x y)
  7196  	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
  7197  	for {
  7198  		x := v_0
  7199  		y := v_1
  7200  		v.reset(OpLOONG64SGTU)
  7201  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7202  		v0.AddArg(y)
  7203  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7204  		v1.AddArg(x)
  7205  		v.AddArg2(v0, v1)
  7206  		return true
  7207  	}
  7208  }
  7209  func rewriteValueLOONG64_OpLess32(v *Value) bool {
  7210  	v_1 := v.Args[1]
  7211  	v_0 := v.Args[0]
  7212  	b := v.Block
  7213  	typ := &b.Func.Config.Types
  7214  	// match: (Less32 x y)
  7215  	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
  7216  	for {
  7217  		x := v_0
  7218  		y := v_1
  7219  		v.reset(OpLOONG64SGT)
  7220  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7221  		v0.AddArg(y)
  7222  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  7223  		v1.AddArg(x)
  7224  		v.AddArg2(v0, v1)
  7225  		return true
  7226  	}
  7227  }
  7228  func rewriteValueLOONG64_OpLess32F(v *Value) bool {
  7229  	v_1 := v.Args[1]
  7230  	v_0 := v.Args[0]
  7231  	b := v.Block
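         	// Note: floating-point less-than is implemented with the swapped greater-than
         	// compare: x < y holds exactly when y > x, hence (CMPGTF y x).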
  7232  	// match: (Less32F x y)
  7233  	// result: (FPFlagTrue (CMPGTF y x))
  7234  	for {
  7235  		x := v_0
  7236  		y := v_1
  7237  		v.reset(OpLOONG64FPFlagTrue)
  7238  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
  7239  		v0.AddArg2(y, x)
  7240  		v.AddArg(v0)
  7241  		return true
  7242  	}
  7243  }
  7244  func rewriteValueLOONG64_OpLess32U(v *Value) bool {
  7245  	v_1 := v.Args[1]
  7246  	v_0 := v.Args[0]
  7247  	b := v.Block
  7248  	typ := &b.Func.Config.Types
  7249  	// match: (Less32U x y)
  7250  	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
  7251  	for {
  7252  		x := v_0
  7253  		y := v_1
  7254  		v.reset(OpLOONG64SGTU)
  7255  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7256  		v0.AddArg(y)
  7257  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7258  		v1.AddArg(x)
  7259  		v.AddArg2(v0, v1)
  7260  		return true
  7261  	}
  7262  }
  7263  func rewriteValueLOONG64_OpLess64(v *Value) bool {
  7264  	v_1 := v.Args[1]
  7265  	v_0 := v.Args[0]
  7266  	// match: (Less64 x y)
  7267  	// result: (SGT y x)
  7268  	for {
  7269  		x := v_0
  7270  		y := v_1
  7271  		v.reset(OpLOONG64SGT)
  7272  		v.AddArg2(y, x)
  7273  		return true
  7274  	}
  7275  }
  7276  func rewriteValueLOONG64_OpLess64F(v *Value) bool {
  7277  	v_1 := v.Args[1]
  7278  	v_0 := v.Args[0]
  7279  	b := v.Block
  7280  	// match: (Less64F x y)
  7281  	// result: (FPFlagTrue (CMPGTD y x))
  7282  	for {
  7283  		x := v_0
  7284  		y := v_1
  7285  		v.reset(OpLOONG64FPFlagTrue)
  7286  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
  7287  		v0.AddArg2(y, x)
  7288  		v.AddArg(v0)
  7289  		return true
  7290  	}
  7291  }
  7292  func rewriteValueLOONG64_OpLess64U(v *Value) bool {
  7293  	v_1 := v.Args[1]
  7294  	v_0 := v.Args[0]
  7295  	// match: (Less64U x y)
  7296  	// result: (SGTU y x)
  7297  	for {
  7298  		x := v_0
  7299  		y := v_1
  7300  		v.reset(OpLOONG64SGTU)
  7301  		v.AddArg2(y, x)
  7302  		return true
  7303  	}
  7304  }
  7305  func rewriteValueLOONG64_OpLess8(v *Value) bool {
  7306  	v_1 := v.Args[1]
  7307  	v_0 := v.Args[0]
  7308  	b := v.Block
  7309  	typ := &b.Func.Config.Types
  7310  	// match: (Less8 x y)
  7311  	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
  7312  	for {
  7313  		x := v_0
  7314  		y := v_1
  7315  		v.reset(OpLOONG64SGT)
  7316  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7317  		v0.AddArg(y)
  7318  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  7319  		v1.AddArg(x)
  7320  		v.AddArg2(v0, v1)
  7321  		return true
  7322  	}
  7323  }
  7324  func rewriteValueLOONG64_OpLess8U(v *Value) bool {
  7325  	v_1 := v.Args[1]
  7326  	v_0 := v.Args[0]
  7327  	b := v.Block
  7328  	typ := &b.Func.Config.Types
  7329  	// match: (Less8U x y)
  7330  	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
  7331  	for {
  7332  		x := v_0
  7333  		y := v_1
  7334  		v.reset(OpLOONG64SGTU)
  7335  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7336  		v0.AddArg(y)
  7337  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7338  		v1.AddArg(x)
  7339  		v.AddArg2(v0, v1)
  7340  		return true
  7341  	}
  7342  }
  7343  func rewriteValueLOONG64_OpLoad(v *Value) bool {
  7344  	v_1 := v.Args[1]
  7345  	v_0 := v.Args[0]
  7346  	// match: (Load <t> ptr mem)
  7347  	// cond: t.IsBoolean()
  7348  	// result: (MOVBUload ptr mem)
  7349  	for {
  7350  		t := v.Type
  7351  		ptr := v_0
  7352  		mem := v_1
  7353  		if !(t.IsBoolean()) {
  7354  			break
  7355  		}
  7356  		v.reset(OpLOONG64MOVBUload)
  7357  		v.AddArg2(ptr, mem)
  7358  		return true
  7359  	}
  7360  	// match: (Load <t> ptr mem)
  7361  	// cond: (is8BitInt(t) && t.IsSigned())
  7362  	// result: (MOVBload ptr mem)
  7363  	for {
  7364  		t := v.Type
  7365  		ptr := v_0
  7366  		mem := v_1
  7367  		if !(is8BitInt(t) && t.IsSigned()) {
  7368  			break
  7369  		}
  7370  		v.reset(OpLOONG64MOVBload)
  7371  		v.AddArg2(ptr, mem)
  7372  		return true
  7373  	}
  7374  	// match: (Load <t> ptr mem)
  7375  	// cond: (is8BitInt(t) && !t.IsSigned())
  7376  	// result: (MOVBUload ptr mem)
  7377  	for {
  7378  		t := v.Type
  7379  		ptr := v_0
  7380  		mem := v_1
  7381  		if !(is8BitInt(t) && !t.IsSigned()) {
  7382  			break
  7383  		}
  7384  		v.reset(OpLOONG64MOVBUload)
  7385  		v.AddArg2(ptr, mem)
  7386  		return true
  7387  	}
  7388  	// match: (Load <t> ptr mem)
  7389  	// cond: (is16BitInt(t) && t.IsSigned())
  7390  	// result: (MOVHload ptr mem)
  7391  	for {
  7392  		t := v.Type
  7393  		ptr := v_0
  7394  		mem := v_1
  7395  		if !(is16BitInt(t) && t.IsSigned()) {
  7396  			break
  7397  		}
  7398  		v.reset(OpLOONG64MOVHload)
  7399  		v.AddArg2(ptr, mem)
  7400  		return true
  7401  	}
  7402  	// match: (Load <t> ptr mem)
  7403  	// cond: (is16BitInt(t) && !t.IsSigned())
  7404  	// result: (MOVHUload ptr mem)
  7405  	for {
  7406  		t := v.Type
  7407  		ptr := v_0
  7408  		mem := v_1
  7409  		if !(is16BitInt(t) && !t.IsSigned()) {
  7410  			break
  7411  		}
  7412  		v.reset(OpLOONG64MOVHUload)
  7413  		v.AddArg2(ptr, mem)
  7414  		return true
  7415  	}
  7416  	// match: (Load <t> ptr mem)
  7417  	// cond: (is32BitInt(t) && t.IsSigned())
  7418  	// result: (MOVWload ptr mem)
  7419  	for {
  7420  		t := v.Type
  7421  		ptr := v_0
  7422  		mem := v_1
  7423  		if !(is32BitInt(t) && t.IsSigned()) {
  7424  			break
  7425  		}
  7426  		v.reset(OpLOONG64MOVWload)
  7427  		v.AddArg2(ptr, mem)
  7428  		return true
  7429  	}
  7430  	// match: (Load <t> ptr mem)
  7431  	// cond: (is32BitInt(t) && !t.IsSigned())
  7432  	// result: (MOVWUload ptr mem)
  7433  	for {
  7434  		t := v.Type
  7435  		ptr := v_0
  7436  		mem := v_1
  7437  		if !(is32BitInt(t) && !t.IsSigned()) {
  7438  			break
  7439  		}
  7440  		v.reset(OpLOONG64MOVWUload)
  7441  		v.AddArg2(ptr, mem)
  7442  		return true
  7443  	}
  7444  	// match: (Load <t> ptr mem)
  7445  	// cond: (is64BitInt(t) || isPtr(t))
  7446  	// result: (MOVVload ptr mem)
  7447  	for {
  7448  		t := v.Type
  7449  		ptr := v_0
  7450  		mem := v_1
  7451  		if !(is64BitInt(t) || isPtr(t)) {
  7452  			break
  7453  		}
  7454  		v.reset(OpLOONG64MOVVload)
  7455  		v.AddArg2(ptr, mem)
  7456  		return true
  7457  	}
  7458  	// match: (Load <t> ptr mem)
  7459  	// cond: is32BitFloat(t)
  7460  	// result: (MOVFload ptr mem)
  7461  	for {
  7462  		t := v.Type
  7463  		ptr := v_0
  7464  		mem := v_1
  7465  		if !(is32BitFloat(t)) {
  7466  			break
  7467  		}
  7468  		v.reset(OpLOONG64MOVFload)
  7469  		v.AddArg2(ptr, mem)
  7470  		return true
  7471  	}
  7472  	// match: (Load <t> ptr mem)
  7473  	// cond: is64BitFloat(t)
  7474  	// result: (MOVDload ptr mem)
  7475  	for {
  7476  		t := v.Type
  7477  		ptr := v_0
  7478  		mem := v_1
  7479  		if !(is64BitFloat(t)) {
  7480  			break
  7481  		}
  7482  		v.reset(OpLOONG64MOVDload)
  7483  		v.AddArg2(ptr, mem)
  7484  		return true
  7485  	}
  7486  	return false
  7487  }
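// Note (editorial commentary, not generated): the generic Load op is dispatched
// purely on the loaded type. Width and signedness select between the
// sign-extending loads (MOVBload/MOVHload/MOVWload) and their zero-extending
// counterparts, 64-bit integers and pointers use MOVVload, and 32/64-bit floats
// use MOVFload/MOVDload.
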
  7488  func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
  7489  	v_1 := v.Args[1]
  7490  	v_0 := v.Args[0]
  7491  	b := v.Block
  7492  	typ := &b.Func.Config.Types
  7493  	// match: (LocalAddr <t> {sym} base mem)
  7494  	// cond: t.Elem().HasPointers()
  7495  	// result: (MOVVaddr {sym} (SPanchored base mem))
  7496  	for {
  7497  		t := v.Type
  7498  		sym := auxToSym(v.Aux)
  7499  		base := v_0
  7500  		mem := v_1
  7501  		if !(t.Elem().HasPointers()) {
  7502  			break
  7503  		}
  7504  		v.reset(OpLOONG64MOVVaddr)
  7505  		v.Aux = symToAux(sym)
  7506  		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
  7507  		v0.AddArg2(base, mem)
  7508  		v.AddArg(v0)
  7509  		return true
  7510  	}
  7511  	// match: (LocalAddr <t> {sym} base _)
  7512  	// cond: !t.Elem().HasPointers()
  7513  	// result: (MOVVaddr {sym} base)
  7514  	for {
  7515  		t := v.Type
  7516  		sym := auxToSym(v.Aux)
  7517  		base := v_0
  7518  		if !(!t.Elem().HasPointers()) {
  7519  			break
  7520  		}
  7521  		v.reset(OpLOONG64MOVVaddr)
  7522  		v.Aux = symToAux(sym)
  7523  		v.AddArg(base)
  7524  		return true
  7525  	}
  7526  	return false
  7527  }
  7528  func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
  7529  	v_1 := v.Args[1]
  7530  	v_0 := v.Args[0]
  7531  	b := v.Block
  7532  	typ := &b.Func.Config.Types
  7533  	// match: (Lsh16x16 x y)
  7534  	// cond: shiftIsBounded(v)
  7535  	// result: (SLLV x y)
  7536  	for {
  7537  		x := v_0
  7538  		y := v_1
  7539  		if !(shiftIsBounded(v)) {
  7540  			break
  7541  		}
  7542  		v.reset(OpLOONG64SLLV)
  7543  		v.AddArg2(x, y)
  7544  		return true
  7545  	}
  7546  	// match: (Lsh16x16 <t> x y)
  7547  	// cond: !shiftIsBounded(v)
  7548  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  7549  	for {
  7550  		t := v.Type
  7551  		x := v_0
  7552  		y := v_1
  7553  		if !(!shiftIsBounded(v)) {
  7554  			break
  7555  		}
  7556  		v.reset(OpLOONG64MASKEQZ)
  7557  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7558  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7559  		v1.AddArg(y)
  7560  		v0.AddArg2(x, v1)
  7561  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7562  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7563  		v3.AuxInt = int64ToAuxInt(64)
  7564  		v2.AddArg2(v3, v1)
  7565  		v.AddArg2(v0, v2)
  7566  		return true
  7567  	}
  7568  	return false
  7569  }
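// Note (editorial commentary, not generated): when a shift count cannot be
// proven in range, the shift rules in this file implement Go's semantics
// (a count >= the operand width yields 0) by masking the hardware shift with
// MASKEQZ. Assuming the usual MASKEQZ behavior (keep the first operand when the
// second is nonzero, otherwise produce 0), the 64-bit-count case is roughly the
// following sketch (names are illustrative only):
//
//	shifted := x << y          // SLLV; the hardware only looks at the low bits of y
//	inRange := 64 > y          // SGTU (MOVVconst [64]) y, as a 0/1 value
//	result := 0                // MASKEQZ: keep shifted only when inRange != 0
//	if inRange {
//		result = shifted
//	}
//
// Narrower shift counts are zero-extended to 64 bits first so the same bound
// check applies.
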
  7570  func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
  7571  	v_1 := v.Args[1]
  7572  	v_0 := v.Args[0]
  7573  	b := v.Block
  7574  	typ := &b.Func.Config.Types
  7575  	// match: (Lsh16x32 x y)
  7576  	// cond: shiftIsBounded(v)
  7577  	// result: (SLLV x y)
  7578  	for {
  7579  		x := v_0
  7580  		y := v_1
  7581  		if !(shiftIsBounded(v)) {
  7582  			break
  7583  		}
  7584  		v.reset(OpLOONG64SLLV)
  7585  		v.AddArg2(x, y)
  7586  		return true
  7587  	}
  7588  	// match: (Lsh16x32 <t> x y)
  7589  	// cond: !shiftIsBounded(v)
  7590  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  7591  	for {
  7592  		t := v.Type
  7593  		x := v_0
  7594  		y := v_1
  7595  		if !(!shiftIsBounded(v)) {
  7596  			break
  7597  		}
  7598  		v.reset(OpLOONG64MASKEQZ)
  7599  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7600  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7601  		v1.AddArg(y)
  7602  		v0.AddArg2(x, v1)
  7603  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7604  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7605  		v3.AuxInt = int64ToAuxInt(64)
  7606  		v2.AddArg2(v3, v1)
  7607  		v.AddArg2(v0, v2)
  7608  		return true
  7609  	}
  7610  	return false
  7611  }
  7612  func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
  7613  	v_1 := v.Args[1]
  7614  	v_0 := v.Args[0]
  7615  	b := v.Block
  7616  	typ := &b.Func.Config.Types
  7617  	// match: (Lsh16x64 x y)
  7618  	// cond: shiftIsBounded(v)
  7619  	// result: (SLLV x y)
  7620  	for {
  7621  		x := v_0
  7622  		y := v_1
  7623  		if !(shiftIsBounded(v)) {
  7624  			break
  7625  		}
  7626  		v.reset(OpLOONG64SLLV)
  7627  		v.AddArg2(x, y)
  7628  		return true
  7629  	}
  7630  	// match: (Lsh16x64 <t> x y)
  7631  	// cond: !shiftIsBounded(v)
  7632  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  7633  	for {
  7634  		t := v.Type
  7635  		x := v_0
  7636  		y := v_1
  7637  		if !(!shiftIsBounded(v)) {
  7638  			break
  7639  		}
  7640  		v.reset(OpLOONG64MASKEQZ)
  7641  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7642  		v0.AddArg2(x, y)
  7643  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7644  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7645  		v2.AuxInt = int64ToAuxInt(64)
  7646  		v1.AddArg2(v2, y)
  7647  		v.AddArg2(v0, v1)
  7648  		return true
  7649  	}
  7650  	return false
  7651  }
  7652  func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
  7653  	v_1 := v.Args[1]
  7654  	v_0 := v.Args[0]
  7655  	b := v.Block
  7656  	typ := &b.Func.Config.Types
  7657  	// match: (Lsh16x8 x y)
  7658  	// cond: shiftIsBounded(v)
  7659  	// result: (SLLV x y)
  7660  	for {
  7661  		x := v_0
  7662  		y := v_1
  7663  		if !(shiftIsBounded(v)) {
  7664  			break
  7665  		}
  7666  		v.reset(OpLOONG64SLLV)
  7667  		v.AddArg2(x, y)
  7668  		return true
  7669  	}
  7670  	// match: (Lsh16x8 <t> x y)
  7671  	// cond: !shiftIsBounded(v)
  7672  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  7673  	for {
  7674  		t := v.Type
  7675  		x := v_0
  7676  		y := v_1
  7677  		if !(!shiftIsBounded(v)) {
  7678  			break
  7679  		}
  7680  		v.reset(OpLOONG64MASKEQZ)
  7681  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7682  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7683  		v1.AddArg(y)
  7684  		v0.AddArg2(x, v1)
  7685  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7686  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7687  		v3.AuxInt = int64ToAuxInt(64)
  7688  		v2.AddArg2(v3, v1)
  7689  		v.AddArg2(v0, v2)
  7690  		return true
  7691  	}
  7692  	return false
  7693  }
  7694  func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
  7695  	v_1 := v.Args[1]
  7696  	v_0 := v.Args[0]
  7697  	b := v.Block
  7698  	typ := &b.Func.Config.Types
  7699  	// match: (Lsh32x16 x y)
  7700  	// cond: shiftIsBounded(v)
  7701  	// result: (SLL x y)
  7702  	for {
  7703  		x := v_0
  7704  		y := v_1
  7705  		if !(shiftIsBounded(v)) {
  7706  			break
  7707  		}
  7708  		v.reset(OpLOONG64SLL)
  7709  		v.AddArg2(x, y)
  7710  		return true
  7711  	}
  7712  	// match: (Lsh32x16 <t> x y)
  7713  	// cond: !shiftIsBounded(v)
  7714  	// result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
  7715  	for {
  7716  		t := v.Type
  7717  		x := v_0
  7718  		y := v_1
  7719  		if !(!shiftIsBounded(v)) {
  7720  			break
  7721  		}
  7722  		v.reset(OpLOONG64MASKEQZ)
  7723  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  7724  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7725  		v1.AddArg(y)
  7726  		v0.AddArg2(x, v1)
  7727  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7728  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7729  		v3.AuxInt = int64ToAuxInt(32)
  7730  		v2.AddArg2(v3, v1)
  7731  		v.AddArg2(v0, v2)
  7732  		return true
  7733  	}
  7734  	return false
  7735  }
  7736  func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
  7737  	v_1 := v.Args[1]
  7738  	v_0 := v.Args[0]
  7739  	b := v.Block
  7740  	typ := &b.Func.Config.Types
  7741  	// match: (Lsh32x32 x y)
  7742  	// cond: shiftIsBounded(v)
  7743  	// result: (SLL x y)
  7744  	for {
  7745  		x := v_0
  7746  		y := v_1
  7747  		if !(shiftIsBounded(v)) {
  7748  			break
  7749  		}
  7750  		v.reset(OpLOONG64SLL)
  7751  		v.AddArg2(x, y)
  7752  		return true
  7753  	}
  7754  	// match: (Lsh32x32 <t> x y)
  7755  	// cond: !shiftIsBounded(v)
  7756  	// result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
  7757  	for {
  7758  		t := v.Type
  7759  		x := v_0
  7760  		y := v_1
  7761  		if !(!shiftIsBounded(v)) {
  7762  			break
  7763  		}
  7764  		v.reset(OpLOONG64MASKEQZ)
  7765  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  7766  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7767  		v1.AddArg(y)
  7768  		v0.AddArg2(x, v1)
  7769  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7770  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7771  		v3.AuxInt = int64ToAuxInt(32)
  7772  		v2.AddArg2(v3, v1)
  7773  		v.AddArg2(v0, v2)
  7774  		return true
  7775  	}
  7776  	return false
  7777  }
  7778  func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
  7779  	v_1 := v.Args[1]
  7780  	v_0 := v.Args[0]
  7781  	b := v.Block
  7782  	typ := &b.Func.Config.Types
  7783  	// match: (Lsh32x64 x y)
  7784  	// cond: shiftIsBounded(v)
  7785  	// result: (SLL x y)
  7786  	for {
  7787  		x := v_0
  7788  		y := v_1
  7789  		if !(shiftIsBounded(v)) {
  7790  			break
  7791  		}
  7792  		v.reset(OpLOONG64SLL)
  7793  		v.AddArg2(x, y)
  7794  		return true
  7795  	}
  7796  	// match: (Lsh32x64 <t> x y)
  7797  	// cond: !shiftIsBounded(v)
  7798  	// result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
  7799  	for {
  7800  		t := v.Type
  7801  		x := v_0
  7802  		y := v_1
  7803  		if !(!shiftIsBounded(v)) {
  7804  			break
  7805  		}
  7806  		v.reset(OpLOONG64MASKEQZ)
  7807  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  7808  		v0.AddArg2(x, y)
  7809  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7810  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7811  		v2.AuxInt = int64ToAuxInt(32)
  7812  		v1.AddArg2(v2, y)
  7813  		v.AddArg2(v0, v1)
  7814  		return true
  7815  	}
  7816  	return false
  7817  }
  7818  func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
  7819  	v_1 := v.Args[1]
  7820  	v_0 := v.Args[0]
  7821  	b := v.Block
  7822  	typ := &b.Func.Config.Types
  7823  	// match: (Lsh32x8 x y)
  7824  	// cond: shiftIsBounded(v)
  7825  	// result: (SLL x y)
  7826  	for {
  7827  		x := v_0
  7828  		y := v_1
  7829  		if !(shiftIsBounded(v)) {
  7830  			break
  7831  		}
  7832  		v.reset(OpLOONG64SLL)
  7833  		v.AddArg2(x, y)
  7834  		return true
  7835  	}
  7836  	// match: (Lsh32x8 <t> x y)
  7837  	// cond: !shiftIsBounded(v)
  7838  	// result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
  7839  	for {
  7840  		t := v.Type
  7841  		x := v_0
  7842  		y := v_1
  7843  		if !(!shiftIsBounded(v)) {
  7844  			break
  7845  		}
  7846  		v.reset(OpLOONG64MASKEQZ)
  7847  		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
  7848  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7849  		v1.AddArg(y)
  7850  		v0.AddArg2(x, v1)
  7851  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7852  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7853  		v3.AuxInt = int64ToAuxInt(32)
  7854  		v2.AddArg2(v3, v1)
  7855  		v.AddArg2(v0, v2)
  7856  		return true
  7857  	}
  7858  	return false
  7859  }
  7860  func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
  7861  	v_1 := v.Args[1]
  7862  	v_0 := v.Args[0]
  7863  	b := v.Block
  7864  	typ := &b.Func.Config.Types
  7865  	// match: (Lsh64x16 x y)
  7866  	// cond: shiftIsBounded(v)
  7867  	// result: (SLLV x y)
  7868  	for {
  7869  		x := v_0
  7870  		y := v_1
  7871  		if !(shiftIsBounded(v)) {
  7872  			break
  7873  		}
  7874  		v.reset(OpLOONG64SLLV)
  7875  		v.AddArg2(x, y)
  7876  		return true
  7877  	}
  7878  	// match: (Lsh64x16 <t> x y)
  7879  	// cond: !shiftIsBounded(v)
  7880  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  7881  	for {
  7882  		t := v.Type
  7883  		x := v_0
  7884  		y := v_1
  7885  		if !(!shiftIsBounded(v)) {
  7886  			break
  7887  		}
  7888  		v.reset(OpLOONG64MASKEQZ)
  7889  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7890  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7891  		v1.AddArg(y)
  7892  		v0.AddArg2(x, v1)
  7893  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7894  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7895  		v3.AuxInt = int64ToAuxInt(64)
  7896  		v2.AddArg2(v3, v1)
  7897  		v.AddArg2(v0, v2)
  7898  		return true
  7899  	}
  7900  	return false
  7901  }
  7902  func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
  7903  	v_1 := v.Args[1]
  7904  	v_0 := v.Args[0]
  7905  	b := v.Block
  7906  	typ := &b.Func.Config.Types
  7907  	// match: (Lsh64x32 x y)
  7908  	// cond: shiftIsBounded(v)
  7909  	// result: (SLLV x y)
  7910  	for {
  7911  		x := v_0
  7912  		y := v_1
  7913  		if !(shiftIsBounded(v)) {
  7914  			break
  7915  		}
  7916  		v.reset(OpLOONG64SLLV)
  7917  		v.AddArg2(x, y)
  7918  		return true
  7919  	}
  7920  	// match: (Lsh64x32 <t> x y)
  7921  	// cond: !shiftIsBounded(v)
  7922  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  7923  	for {
  7924  		t := v.Type
  7925  		x := v_0
  7926  		y := v_1
  7927  		if !(!shiftIsBounded(v)) {
  7928  			break
  7929  		}
  7930  		v.reset(OpLOONG64MASKEQZ)
  7931  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7932  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7933  		v1.AddArg(y)
  7934  		v0.AddArg2(x, v1)
  7935  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7936  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7937  		v3.AuxInt = int64ToAuxInt(64)
  7938  		v2.AddArg2(v3, v1)
  7939  		v.AddArg2(v0, v2)
  7940  		return true
  7941  	}
  7942  	return false
  7943  }
  7944  func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
  7945  	v_1 := v.Args[1]
  7946  	v_0 := v.Args[0]
  7947  	b := v.Block
  7948  	typ := &b.Func.Config.Types
  7949  	// match: (Lsh64x64 x y)
  7950  	// cond: shiftIsBounded(v)
  7951  	// result: (SLLV x y)
  7952  	for {
  7953  		x := v_0
  7954  		y := v_1
  7955  		if !(shiftIsBounded(v)) {
  7956  			break
  7957  		}
  7958  		v.reset(OpLOONG64SLLV)
  7959  		v.AddArg2(x, y)
  7960  		return true
  7961  	}
  7962  	// match: (Lsh64x64 <t> x y)
  7963  	// cond: !shiftIsBounded(v)
  7964  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  7965  	for {
  7966  		t := v.Type
  7967  		x := v_0
  7968  		y := v_1
  7969  		if !(!shiftIsBounded(v)) {
  7970  			break
  7971  		}
  7972  		v.reset(OpLOONG64MASKEQZ)
  7973  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  7974  		v0.AddArg2(x, y)
  7975  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  7976  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  7977  		v2.AuxInt = int64ToAuxInt(64)
  7978  		v1.AddArg2(v2, y)
  7979  		v.AddArg2(v0, v1)
  7980  		return true
  7981  	}
  7982  	return false
  7983  }
  7984  func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
  7985  	v_1 := v.Args[1]
  7986  	v_0 := v.Args[0]
  7987  	b := v.Block
  7988  	typ := &b.Func.Config.Types
  7989  	// match: (Lsh64x8 x y)
  7990  	// cond: shiftIsBounded(v)
  7991  	// result: (SLLV x y)
  7992  	for {
  7993  		x := v_0
  7994  		y := v_1
  7995  		if !(shiftIsBounded(v)) {
  7996  			break
  7997  		}
  7998  		v.reset(OpLOONG64SLLV)
  7999  		v.AddArg2(x, y)
  8000  		return true
  8001  	}
  8002  	// match: (Lsh64x8 <t> x y)
  8003  	// cond: !shiftIsBounded(v)
  8004  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  8005  	for {
  8006  		t := v.Type
  8007  		x := v_0
  8008  		y := v_1
  8009  		if !(!shiftIsBounded(v)) {
  8010  			break
  8011  		}
  8012  		v.reset(OpLOONG64MASKEQZ)
  8013  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8014  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8015  		v1.AddArg(y)
  8016  		v0.AddArg2(x, v1)
  8017  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8018  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8019  		v3.AuxInt = int64ToAuxInt(64)
  8020  		v2.AddArg2(v3, v1)
  8021  		v.AddArg2(v0, v2)
  8022  		return true
  8023  	}
  8024  	return false
  8025  }
  8026  func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
  8027  	v_1 := v.Args[1]
  8028  	v_0 := v.Args[0]
  8029  	b := v.Block
  8030  	typ := &b.Func.Config.Types
  8031  	// match: (Lsh8x16 x y)
  8032  	// cond: shiftIsBounded(v)
  8033  	// result: (SLLV x y)
  8034  	for {
  8035  		x := v_0
  8036  		y := v_1
  8037  		if !(shiftIsBounded(v)) {
  8038  			break
  8039  		}
  8040  		v.reset(OpLOONG64SLLV)
  8041  		v.AddArg2(x, y)
  8042  		return true
  8043  	}
  8044  	// match: (Lsh8x16 <t> x y)
  8045  	// cond: !shiftIsBounded(v)
  8046  	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  8047  	for {
  8048  		t := v.Type
  8049  		x := v_0
  8050  		y := v_1
  8051  		if !(!shiftIsBounded(v)) {
  8052  			break
  8053  		}
  8054  		v.reset(OpLOONG64MASKEQZ)
  8055  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8056  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8057  		v1.AddArg(y)
  8058  		v0.AddArg2(x, v1)
  8059  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8060  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8061  		v3.AuxInt = int64ToAuxInt(64)
  8062  		v2.AddArg2(v3, v1)
  8063  		v.AddArg2(v0, v2)
  8064  		return true
  8065  	}
  8066  	return false
  8067  }
  8068  func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
  8069  	v_1 := v.Args[1]
  8070  	v_0 := v.Args[0]
  8071  	b := v.Block
  8072  	typ := &b.Func.Config.Types
  8073  	// match: (Lsh8x32 x y)
  8074  	// cond: shiftIsBounded(v)
  8075  	// result: (SLLV x y)
  8076  	for {
  8077  		x := v_0
  8078  		y := v_1
  8079  		if !(shiftIsBounded(v)) {
  8080  			break
  8081  		}
  8082  		v.reset(OpLOONG64SLLV)
  8083  		v.AddArg2(x, y)
  8084  		return true
  8085  	}
  8086  	// match: (Lsh8x32 <t> x y)
  8087  	// cond: !shiftIsBounded(v)
  8088  	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  8089  	for {
  8090  		t := v.Type
  8091  		x := v_0
  8092  		y := v_1
  8093  		if !(!shiftIsBounded(v)) {
  8094  			break
  8095  		}
  8096  		v.reset(OpLOONG64MASKEQZ)
  8097  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8098  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8099  		v1.AddArg(y)
  8100  		v0.AddArg2(x, v1)
  8101  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8102  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8103  		v3.AuxInt = int64ToAuxInt(64)
  8104  		v2.AddArg2(v3, v1)
  8105  		v.AddArg2(v0, v2)
  8106  		return true
  8107  	}
  8108  	return false
  8109  }
  8110  func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
  8111  	v_1 := v.Args[1]
  8112  	v_0 := v.Args[0]
  8113  	b := v.Block
  8114  	typ := &b.Func.Config.Types
  8115  	// match: (Lsh8x64 x y)
  8116  	// cond: shiftIsBounded(v)
  8117  	// result: (SLLV x y)
  8118  	for {
  8119  		x := v_0
  8120  		y := v_1
  8121  		if !(shiftIsBounded(v)) {
  8122  			break
  8123  		}
  8124  		v.reset(OpLOONG64SLLV)
  8125  		v.AddArg2(x, y)
  8126  		return true
  8127  	}
  8128  	// match: (Lsh8x64 <t> x y)
  8129  	// cond: !shiftIsBounded(v)
  8130  	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  8131  	for {
  8132  		t := v.Type
  8133  		x := v_0
  8134  		y := v_1
  8135  		if !(!shiftIsBounded(v)) {
  8136  			break
  8137  		}
  8138  		v.reset(OpLOONG64MASKEQZ)
  8139  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8140  		v0.AddArg2(x, y)
  8141  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8142  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8143  		v2.AuxInt = int64ToAuxInt(64)
  8144  		v1.AddArg2(v2, y)
  8145  		v.AddArg2(v0, v1)
  8146  		return true
  8147  	}
  8148  	return false
  8149  }
  8150  func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
  8151  	v_1 := v.Args[1]
  8152  	v_0 := v.Args[0]
  8153  	b := v.Block
  8154  	typ := &b.Func.Config.Types
  8155  	// match: (Lsh8x8 x y)
  8156  	// cond: shiftIsBounded(v)
  8157  	// result: (SLLV x y)
  8158  	for {
  8159  		x := v_0
  8160  		y := v_1
  8161  		if !(shiftIsBounded(v)) {
  8162  			break
  8163  		}
  8164  		v.reset(OpLOONG64SLLV)
  8165  		v.AddArg2(x, y)
  8166  		return true
  8167  	}
  8168  	// match: (Lsh8x8 <t> x y)
  8169  	// cond: !shiftIsBounded(v)
  8170  	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  8171  	for {
  8172  		t := v.Type
  8173  		x := v_0
  8174  		y := v_1
  8175  		if !(!shiftIsBounded(v)) {
  8176  			break
  8177  		}
  8178  		v.reset(OpLOONG64MASKEQZ)
  8179  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  8180  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8181  		v1.AddArg(y)
  8182  		v0.AddArg2(x, v1)
  8183  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  8184  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8185  		v3.AuxInt = int64ToAuxInt(64)
  8186  		v2.AddArg2(v3, v1)
  8187  		v.AddArg2(v0, v2)
  8188  		return true
  8189  	}
  8190  	return false
  8191  }
  8192  func rewriteValueLOONG64_OpMod16(v *Value) bool {
  8193  	v_1 := v.Args[1]
  8194  	v_0 := v.Args[0]
  8195  	b := v.Block
  8196  	typ := &b.Func.Config.Types
  8197  	// match: (Mod16 x y)
  8198  	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
  8199  	for {
  8200  		x := v_0
  8201  		y := v_1
  8202  		v.reset(OpLOONG64REMV)
  8203  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8204  		v0.AddArg(x)
  8205  		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  8206  		v1.AddArg(y)
  8207  		v.AddArg2(v0, v1)
  8208  		return true
  8209  	}
  8210  }
  8211  func rewriteValueLOONG64_OpMod16u(v *Value) bool {
  8212  	v_1 := v.Args[1]
  8213  	v_0 := v.Args[0]
  8214  	b := v.Block
  8215  	typ := &b.Func.Config.Types
  8216  	// match: (Mod16u x y)
  8217  	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
  8218  	for {
  8219  		x := v_0
  8220  		y := v_1
  8221  		v.reset(OpLOONG64REMVU)
  8222  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8223  		v0.AddArg(x)
  8224  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8225  		v1.AddArg(y)
  8226  		v.AddArg2(v0, v1)
  8227  		return true
  8228  	}
  8229  }
  8230  func rewriteValueLOONG64_OpMod32(v *Value) bool {
  8231  	v_1 := v.Args[1]
  8232  	v_0 := v.Args[0]
  8233  	b := v.Block
  8234  	typ := &b.Func.Config.Types
  8235  	// match: (Mod32 x y)
  8236  	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
  8237  	for {
  8238  		x := v_0
  8239  		y := v_1
  8240  		v.reset(OpLOONG64REMV)
  8241  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  8242  		v0.AddArg(x)
  8243  		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  8244  		v1.AddArg(y)
  8245  		v.AddArg2(v0, v1)
  8246  		return true
  8247  	}
  8248  }
  8249  func rewriteValueLOONG64_OpMod32u(v *Value) bool {
  8250  	v_1 := v.Args[1]
  8251  	v_0 := v.Args[0]
  8252  	b := v.Block
  8253  	typ := &b.Func.Config.Types
  8254  	// match: (Mod32u x y)
  8255  	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
  8256  	for {
  8257  		x := v_0
  8258  		y := v_1
  8259  		v.reset(OpLOONG64REMVU)
  8260  		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8261  		v0.AddArg(x)
  8262  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8263  		v1.AddArg(y)
  8264  		v.AddArg2(v0, v1)
  8265  		return true
  8266  	}
  8267  }
  8268  func rewriteValueLOONG64_OpMod64(v *Value) bool {
  8269  	v_1 := v.Args[1]
  8270  	v_0 := v.Args[0]
  8271  	// match: (Mod64 x y)
  8272  	// result: (REMV x y)
  8273  	for {
  8274  		x := v_0
  8275  		y := v_1
  8276  		v.reset(OpLOONG64REMV)
  8277  		v.AddArg2(x, y)
  8278  		return true
  8279  	}
  8280  }
  8281  func rewriteValueLOONG64_OpMod8(v *Value) bool {
  8282  	v_1 := v.Args[1]
  8283  	v_0 := v.Args[0]
  8284  	b := v.Block
  8285  	typ := &b.Func.Config.Types
  8286  	// match: (Mod8 x y)
  8287  	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
  8288  	for {
  8289  		x := v_0
  8290  		y := v_1
  8291  		v.reset(OpLOONG64REMV)
  8292  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8293  		v0.AddArg(x)
  8294  		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
  8295  		v1.AddArg(y)
  8296  		v.AddArg2(v0, v1)
  8297  		return true
  8298  	}
  8299  }
  8300  func rewriteValueLOONG64_OpMod8u(v *Value) bool {
  8301  	v_1 := v.Args[1]
  8302  	v_0 := v.Args[0]
  8303  	b := v.Block
  8304  	typ := &b.Func.Config.Types
  8305  	// match: (Mod8u x y)
  8306  	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
  8307  	for {
  8308  		x := v_0
  8309  		y := v_1
  8310  		v.reset(OpLOONG64REMVU)
  8311  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8312  		v0.AddArg(x)
  8313  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8314  		v1.AddArg(y)
  8315  		v.AddArg2(v0, v1)
  8316  		return true
  8317  	}
  8318  }
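// Note (editorial commentary, not generated): the sub-64-bit Mod rules widen
// both operands first (sign-extension for the signed forms, zero-extension for
// the unsigned ones) so that a single 64-bit REMV/REMVU produces the correct
// narrow remainder.
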
  8319  func rewriteValueLOONG64_OpMove(v *Value) bool {
  8320  	v_2 := v.Args[2]
  8321  	v_1 := v.Args[1]
  8322  	v_0 := v.Args[0]
  8323  	b := v.Block
  8324  	typ := &b.Func.Config.Types
  8325  	// match: (Move [0] _ _ mem)
  8326  	// result: mem
  8327  	for {
  8328  		if auxIntToInt64(v.AuxInt) != 0 {
  8329  			break
  8330  		}
  8331  		mem := v_2
  8332  		v.copyOf(mem)
  8333  		return true
  8334  	}
  8335  	// match: (Move [1] dst src mem)
  8336  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  8337  	for {
  8338  		if auxIntToInt64(v.AuxInt) != 1 {
  8339  			break
  8340  		}
  8341  		dst := v_0
  8342  		src := v_1
  8343  		mem := v_2
  8344  		v.reset(OpLOONG64MOVBstore)
  8345  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8346  		v0.AddArg2(src, mem)
  8347  		v.AddArg3(dst, v0, mem)
  8348  		return true
  8349  	}
  8350  	// match: (Move [2] dst src mem)
  8351  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  8352  	for {
  8353  		if auxIntToInt64(v.AuxInt) != 2 {
  8354  			break
  8355  		}
  8356  		dst := v_0
  8357  		src := v_1
  8358  		mem := v_2
  8359  		v.reset(OpLOONG64MOVHstore)
  8360  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8361  		v0.AddArg2(src, mem)
  8362  		v.AddArg3(dst, v0, mem)
  8363  		return true
  8364  	}
  8365  	// match: (Move [3] dst src mem)
  8366  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  8367  	for {
  8368  		if auxIntToInt64(v.AuxInt) != 3 {
  8369  			break
  8370  		}
  8371  		dst := v_0
  8372  		src := v_1
  8373  		mem := v_2
  8374  		v.reset(OpLOONG64MOVBstore)
  8375  		v.AuxInt = int32ToAuxInt(2)
  8376  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8377  		v0.AuxInt = int32ToAuxInt(2)
  8378  		v0.AddArg2(src, mem)
  8379  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
  8380  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8381  		v2.AddArg2(src, mem)
  8382  		v1.AddArg3(dst, v2, mem)
  8383  		v.AddArg3(dst, v0, v1)
  8384  		return true
  8385  	}
  8386  	// match: (Move [4] dst src mem)
  8387  	// result: (MOVWstore dst (MOVWUload src mem) mem)
  8388  	for {
  8389  		if auxIntToInt64(v.AuxInt) != 4 {
  8390  			break
  8391  		}
  8392  		dst := v_0
  8393  		src := v_1
  8394  		mem := v_2
  8395  		v.reset(OpLOONG64MOVWstore)
  8396  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8397  		v0.AddArg2(src, mem)
  8398  		v.AddArg3(dst, v0, mem)
  8399  		return true
  8400  	}
  8401  	// match: (Move [5] dst src mem)
  8402  	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8403  	for {
  8404  		if auxIntToInt64(v.AuxInt) != 5 {
  8405  			break
  8406  		}
  8407  		dst := v_0
  8408  		src := v_1
  8409  		mem := v_2
  8410  		v.reset(OpLOONG64MOVBstore)
  8411  		v.AuxInt = int32ToAuxInt(4)
  8412  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8413  		v0.AuxInt = int32ToAuxInt(4)
  8414  		v0.AddArg2(src, mem)
  8415  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8416  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8417  		v2.AddArg2(src, mem)
  8418  		v1.AddArg3(dst, v2, mem)
  8419  		v.AddArg3(dst, v0, v1)
  8420  		return true
  8421  	}
  8422  	// match: (Move [6] dst src mem)
  8423  	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8424  	for {
  8425  		if auxIntToInt64(v.AuxInt) != 6 {
  8426  			break
  8427  		}
  8428  		dst := v_0
  8429  		src := v_1
  8430  		mem := v_2
  8431  		v.reset(OpLOONG64MOVHstore)
  8432  		v.AuxInt = int32ToAuxInt(4)
  8433  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8434  		v0.AuxInt = int32ToAuxInt(4)
  8435  		v0.AddArg2(src, mem)
  8436  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8437  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8438  		v2.AddArg2(src, mem)
  8439  		v1.AddArg3(dst, v2, mem)
  8440  		v.AddArg3(dst, v0, v1)
  8441  		return true
  8442  	}
  8443  	// match: (Move [7] dst src mem)
  8444  	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
  8445  	for {
  8446  		if auxIntToInt64(v.AuxInt) != 7 {
  8447  			break
  8448  		}
  8449  		dst := v_0
  8450  		src := v_1
  8451  		mem := v_2
  8452  		v.reset(OpLOONG64MOVWstore)
  8453  		v.AuxInt = int32ToAuxInt(3)
  8454  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8455  		v0.AuxInt = int32ToAuxInt(3)
  8456  		v0.AddArg2(src, mem)
  8457  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
  8458  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8459  		v2.AddArg2(src, mem)
  8460  		v1.AddArg3(dst, v2, mem)
  8461  		v.AddArg3(dst, v0, v1)
  8462  		return true
  8463  	}
  8464  	// match: (Move [8] dst src mem)
  8465  	// result: (MOVVstore dst (MOVVload src mem) mem)
  8466  	for {
  8467  		if auxIntToInt64(v.AuxInt) != 8 {
  8468  			break
  8469  		}
  8470  		dst := v_0
  8471  		src := v_1
  8472  		mem := v_2
  8473  		v.reset(OpLOONG64MOVVstore)
  8474  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8475  		v0.AddArg2(src, mem)
  8476  		v.AddArg3(dst, v0, mem)
  8477  		return true
  8478  	}
  8479  	// match: (Move [9] dst src mem)
  8480  	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8481  	for {
  8482  		if auxIntToInt64(v.AuxInt) != 9 {
  8483  			break
  8484  		}
  8485  		dst := v_0
  8486  		src := v_1
  8487  		mem := v_2
  8488  		v.reset(OpLOONG64MOVBstore)
  8489  		v.AuxInt = int32ToAuxInt(8)
  8490  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
  8491  		v0.AuxInt = int32ToAuxInt(8)
  8492  		v0.AddArg2(src, mem)
  8493  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8494  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8495  		v2.AddArg2(src, mem)
  8496  		v1.AddArg3(dst, v2, mem)
  8497  		v.AddArg3(dst, v0, v1)
  8498  		return true
  8499  	}
  8500  	// match: (Move [10] dst src mem)
  8501  	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8502  	for {
  8503  		if auxIntToInt64(v.AuxInt) != 10 {
  8504  			break
  8505  		}
  8506  		dst := v_0
  8507  		src := v_1
  8508  		mem := v_2
  8509  		v.reset(OpLOONG64MOVHstore)
  8510  		v.AuxInt = int32ToAuxInt(8)
  8511  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
  8512  		v0.AuxInt = int32ToAuxInt(8)
  8513  		v0.AddArg2(src, mem)
  8514  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8515  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8516  		v2.AddArg2(src, mem)
  8517  		v1.AddArg3(dst, v2, mem)
  8518  		v.AddArg3(dst, v0, v1)
  8519  		return true
  8520  	}
  8521  	// match: (Move [11] dst src mem)
  8522  	// result: (MOVWstore [7] dst (MOVWload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8523  	for {
  8524  		if auxIntToInt64(v.AuxInt) != 11 {
  8525  			break
  8526  		}
  8527  		dst := v_0
  8528  		src := v_1
  8529  		mem := v_2
  8530  		v.reset(OpLOONG64MOVWstore)
  8531  		v.AuxInt = int32ToAuxInt(7)
  8532  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
  8533  		v0.AuxInt = int32ToAuxInt(7)
  8534  		v0.AddArg2(src, mem)
  8535  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8536  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8537  		v2.AddArg2(src, mem)
  8538  		v1.AddArg3(dst, v2, mem)
  8539  		v.AddArg3(dst, v0, v1)
  8540  		return true
  8541  	}
  8542  	// match: (Move [12] dst src mem)
  8543  	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8544  	for {
  8545  		if auxIntToInt64(v.AuxInt) != 12 {
  8546  			break
  8547  		}
  8548  		dst := v_0
  8549  		src := v_1
  8550  		mem := v_2
  8551  		v.reset(OpLOONG64MOVWstore)
  8552  		v.AuxInt = int32ToAuxInt(8)
  8553  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
  8554  		v0.AuxInt = int32ToAuxInt(8)
  8555  		v0.AddArg2(src, mem)
  8556  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8557  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8558  		v2.AddArg2(src, mem)
  8559  		v1.AddArg3(dst, v2, mem)
  8560  		v.AddArg3(dst, v0, v1)
  8561  		return true
  8562  	}
  8563  	// match: (Move [13] dst src mem)
  8564  	// result: (MOVVstore [5] dst (MOVVload [5] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8565  	for {
  8566  		if auxIntToInt64(v.AuxInt) != 13 {
  8567  			break
  8568  		}
  8569  		dst := v_0
  8570  		src := v_1
  8571  		mem := v_2
  8572  		v.reset(OpLOONG64MOVVstore)
  8573  		v.AuxInt = int32ToAuxInt(5)
  8574  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8575  		v0.AuxInt = int32ToAuxInt(5)
  8576  		v0.AddArg2(src, mem)
  8577  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8578  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8579  		v2.AddArg2(src, mem)
  8580  		v1.AddArg3(dst, v2, mem)
  8581  		v.AddArg3(dst, v0, v1)
  8582  		return true
  8583  	}
  8584  	// match: (Move [14] dst src mem)
  8585  	// result: (MOVVstore [6] dst (MOVVload [6] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8586  	for {
  8587  		if auxIntToInt64(v.AuxInt) != 14 {
  8588  			break
  8589  		}
  8590  		dst := v_0
  8591  		src := v_1
  8592  		mem := v_2
  8593  		v.reset(OpLOONG64MOVVstore)
  8594  		v.AuxInt = int32ToAuxInt(6)
  8595  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8596  		v0.AuxInt = int32ToAuxInt(6)
  8597  		v0.AddArg2(src, mem)
  8598  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8599  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8600  		v2.AddArg2(src, mem)
  8601  		v1.AddArg3(dst, v2, mem)
  8602  		v.AddArg3(dst, v0, v1)
  8603  		return true
  8604  	}
  8605  	// match: (Move [15] dst src mem)
  8606  	// result: (MOVVstore [7] dst (MOVVload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8607  	for {
  8608  		if auxIntToInt64(v.AuxInt) != 15 {
  8609  			break
  8610  		}
  8611  		dst := v_0
  8612  		src := v_1
  8613  		mem := v_2
  8614  		v.reset(OpLOONG64MOVVstore)
  8615  		v.AuxInt = int32ToAuxInt(7)
  8616  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8617  		v0.AuxInt = int32ToAuxInt(7)
  8618  		v0.AddArg2(src, mem)
  8619  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8620  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8621  		v2.AddArg2(src, mem)
  8622  		v1.AddArg3(dst, v2, mem)
  8623  		v.AddArg3(dst, v0, v1)
  8624  		return true
  8625  	}
  8626  	// match: (Move [16] dst src mem)
  8627  	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
  8628  	for {
  8629  		if auxIntToInt64(v.AuxInt) != 16 {
  8630  			break
  8631  		}
  8632  		dst := v_0
  8633  		src := v_1
  8634  		mem := v_2
  8635  		v.reset(OpLOONG64MOVVstore)
  8636  		v.AuxInt = int32ToAuxInt(8)
  8637  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8638  		v0.AuxInt = int32ToAuxInt(8)
  8639  		v0.AddArg2(src, mem)
  8640  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
  8641  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
  8642  		v2.AddArg2(src, mem)
  8643  		v1.AddArg3(dst, v2, mem)
  8644  		v.AddArg3(dst, v0, v1)
  8645  		return true
  8646  	}
  8647  	// match: (Move [s] dst src mem)
  8648  	// cond: s%8 != 0 && s > 16
  8649  	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
  8650  	for {
  8651  		s := auxIntToInt64(v.AuxInt)
  8652  		dst := v_0
  8653  		src := v_1
  8654  		mem := v_2
  8655  		if !(s%8 != 0 && s > 16) {
  8656  			break
  8657  		}
  8658  		v.reset(OpMove)
  8659  		v.AuxInt = int64ToAuxInt(s % 8)
  8660  		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
  8661  		v0.AuxInt = int64ToAuxInt(s - s%8)
  8662  		v0.AddArg(dst)
  8663  		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
  8664  		v1.AuxInt = int64ToAuxInt(s - s%8)
  8665  		v1.AddArg(src)
  8666  		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
  8667  		v2.AuxInt = int64ToAuxInt(s - s%8)
  8668  		v2.AddArg3(dst, src, mem)
  8669  		v.AddArg3(v0, v1, v2)
  8670  		return true
  8671  	}
  8672  	// match: (Move [s] dst src mem)
  8673  	// cond: s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)
  8674  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  8675  	for {
  8676  		s := auxIntToInt64(v.AuxInt)
  8677  		dst := v_0
  8678  		src := v_1
  8679  		mem := v_2
  8680  		if !(s%8 == 0 && s > 16 && s <= 8*128 && logLargeCopy(v, s)) {
  8681  			break
  8682  		}
  8683  		v.reset(OpLOONG64DUFFCOPY)
  8684  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  8685  		v.AddArg3(dst, src, mem)
  8686  		return true
  8687  	}
  8688  	// match: (Move [s] dst src mem)
  8689  	// cond: s%8 == 0 && s > 1024 && logLargeCopy(v, s)
  8690  	// result: (LoweredMove dst src (ADDVconst <src.Type> src [s-8]) mem)
  8691  	for {
  8692  		s := auxIntToInt64(v.AuxInt)
  8693  		dst := v_0
  8694  		src := v_1
  8695  		mem := v_2
  8696  		if !(s%8 == 0 && s > 1024 && logLargeCopy(v, s)) {
  8697  			break
  8698  		}
  8699  		v.reset(OpLOONG64LoweredMove)
  8700  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, src.Type)
  8701  		v0.AuxInt = int64ToAuxInt(s - 8)
  8702  		v0.AddArg(src)
  8703  		v.AddArg4(dst, src, v0, mem)
  8704  		return true
  8705  	}
  8706  	return false
  8707  }
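// Note (editorial commentary, not generated): Move is lowered in tiers. Sizes
// up to 16 bytes become a fixed load/store sequence, with odd sizes such as 7
// or 11 handled by overlapping accesses (e.g. a 4-byte store at offset 3 on top
// of one at offset 0) rather than byte loops. Larger unaligned sizes are split
// into an 8-byte-aligned bulk Move plus a small tail Move. Aligned sizes from
// 17 to 1024 bytes use DUFFCOPY, where the 16*(128 - s/8) AuxInt appears to
// select the entry offset into the Duff's-device routine; aligned sizes above
// 1024 fall back to the LoweredMove runtime loop, which receives src+s-8 (the
// address of the last 8-byte block) as its end marker.
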
  8708  func rewriteValueLOONG64_OpNeq16(v *Value) bool {
  8709  	v_1 := v.Args[1]
  8710  	v_0 := v.Args[0]
  8711  	b := v.Block
  8712  	typ := &b.Func.Config.Types
  8713  	// match: (Neq16 x y)
  8714  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  8715  	for {
  8716  		x := v_0
  8717  		y := v_1
  8718  		v.reset(OpLOONG64SGTU)
  8719  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  8720  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8721  		v1.AddArg(x)
  8722  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  8723  		v2.AddArg(y)
  8724  		v0.AddArg2(v1, v2)
  8725  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8726  		v3.AuxInt = int64ToAuxInt(0)
  8727  		v.AddArg2(v0, v3)
  8728  		return true
  8729  	}
  8730  }
  8731  func rewriteValueLOONG64_OpNeq32(v *Value) bool {
  8732  	v_1 := v.Args[1]
  8733  	v_0 := v.Args[0]
  8734  	b := v.Block
  8735  	typ := &b.Func.Config.Types
  8736  	// match: (Neq32 x y)
  8737  	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
  8738  	for {
  8739  		x := v_0
  8740  		y := v_1
  8741  		v.reset(OpLOONG64SGTU)
  8742  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  8743  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8744  		v1.AddArg(x)
  8745  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8746  		v2.AddArg(y)
  8747  		v0.AddArg2(v1, v2)
  8748  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8749  		v3.AuxInt = int64ToAuxInt(0)
  8750  		v.AddArg2(v0, v3)
  8751  		return true
  8752  	}
  8753  }
  8754  func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
  8755  	v_1 := v.Args[1]
  8756  	v_0 := v.Args[0]
  8757  	b := v.Block
  8758  	// match: (Neq32F x y)
  8759  	// result: (FPFlagFalse (CMPEQF x y))
  8760  	for {
  8761  		x := v_0
  8762  		y := v_1
  8763  		v.reset(OpLOONG64FPFlagFalse)
  8764  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
  8765  		v0.AddArg2(x, y)
  8766  		v.AddArg(v0)
  8767  		return true
  8768  	}
  8769  }
  8770  func rewriteValueLOONG64_OpNeq64(v *Value) bool {
  8771  	v_1 := v.Args[1]
  8772  	v_0 := v.Args[0]
  8773  	b := v.Block
  8774  	typ := &b.Func.Config.Types
  8775  	// match: (Neq64 x y)
  8776  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  8777  	for {
  8778  		x := v_0
  8779  		y := v_1
  8780  		v.reset(OpLOONG64SGTU)
  8781  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  8782  		v0.AddArg2(x, y)
  8783  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8784  		v1.AuxInt = int64ToAuxInt(0)
  8785  		v.AddArg2(v0, v1)
  8786  		return true
  8787  	}
  8788  }
  8789  func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
  8790  	v_1 := v.Args[1]
  8791  	v_0 := v.Args[0]
  8792  	b := v.Block
  8793  	// match: (Neq64F x y)
  8794  	// result: (FPFlagFalse (CMPEQD x y))
  8795  	for {
  8796  		x := v_0
  8797  		y := v_1
  8798  		v.reset(OpLOONG64FPFlagFalse)
  8799  		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
  8800  		v0.AddArg2(x, y)
  8801  		v.AddArg(v0)
  8802  		return true
  8803  	}
  8804  }
  8805  func rewriteValueLOONG64_OpNeq8(v *Value) bool {
  8806  	v_1 := v.Args[1]
  8807  	v_0 := v.Args[0]
  8808  	b := v.Block
  8809  	typ := &b.Func.Config.Types
  8810  	// match: (Neq8 x y)
  8811  	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
  8812  	for {
  8813  		x := v_0
  8814  		y := v_1
  8815  		v.reset(OpLOONG64SGTU)
  8816  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  8817  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8818  		v1.AddArg(x)
  8819  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  8820  		v2.AddArg(y)
  8821  		v0.AddArg2(v1, v2)
  8822  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8823  		v3.AuxInt = int64ToAuxInt(0)
  8824  		v.AddArg2(v0, v3)
  8825  		return true
  8826  	}
  8827  }
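// Note (editorial commentary, not generated): the integer Neq* rules compute
// inequality as "x XOR y is unsigned-greater-than 0", i.e. SGTU(x^y, 0), after
// zero-extending sub-64-bit operands so stray high bits cannot leak into the
// XOR. The floating-point variants instead negate an equality compare via
// FPFlagFalse(CMPEQF/CMPEQD).
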
  8828  func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
  8829  	v_1 := v.Args[1]
  8830  	v_0 := v.Args[0]
  8831  	b := v.Block
  8832  	typ := &b.Func.Config.Types
  8833  	// match: (NeqPtr x y)
  8834  	// result: (SGTU (XOR x y) (MOVVconst [0]))
  8835  	for {
  8836  		x := v_0
  8837  		y := v_1
  8838  		v.reset(OpLOONG64SGTU)
  8839  		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
  8840  		v0.AddArg2(x, y)
  8841  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  8842  		v1.AuxInt = int64ToAuxInt(0)
  8843  		v.AddArg2(v0, v1)
  8844  		return true
  8845  	}
  8846  }
  8847  func rewriteValueLOONG64_OpNot(v *Value) bool {
  8848  	v_0 := v.Args[0]
  8849  	// match: (Not x)
  8850  	// result: (XORconst [1] x)
  8851  	for {
  8852  		x := v_0
  8853  		v.reset(OpLOONG64XORconst)
  8854  		v.AuxInt = int64ToAuxInt(1)
  8855  		v.AddArg(x)
  8856  		return true
  8857  	}
  8858  }
  8859  func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
  8860  	v_0 := v.Args[0]
  8861  	// match: (OffPtr [off] ptr:(SP))
  8862  	// result: (MOVVaddr [int32(off)] ptr)
  8863  	for {
  8864  		off := auxIntToInt64(v.AuxInt)
  8865  		ptr := v_0
  8866  		if ptr.Op != OpSP {
  8867  			break
  8868  		}
  8869  		v.reset(OpLOONG64MOVVaddr)
  8870  		v.AuxInt = int32ToAuxInt(int32(off))
  8871  		v.AddArg(ptr)
  8872  		return true
  8873  	}
  8874  	// match: (OffPtr [off] ptr)
  8875  	// result: (ADDVconst [off] ptr)
  8876  	for {
  8877  		off := auxIntToInt64(v.AuxInt)
  8878  		ptr := v_0
  8879  		v.reset(OpLOONG64ADDVconst)
  8880  		v.AuxInt = int64ToAuxInt(off)
  8881  		v.AddArg(ptr)
  8882  		return true
  8883  	}
  8884  }
  8885  func rewriteValueLOONG64_OpPanicBounds(v *Value) bool {
  8886  	v_2 := v.Args[2]
  8887  	v_1 := v.Args[1]
  8888  	v_0 := v.Args[0]
  8889  	// match: (PanicBounds [kind] x y mem)
  8890  	// cond: boundsABI(kind) == 0
  8891  	// result: (LoweredPanicBoundsA [kind] x y mem)
  8892  	for {
  8893  		kind := auxIntToInt64(v.AuxInt)
  8894  		x := v_0
  8895  		y := v_1
  8896  		mem := v_2
  8897  		if !(boundsABI(kind) == 0) {
  8898  			break
  8899  		}
  8900  		v.reset(OpLOONG64LoweredPanicBoundsA)
  8901  		v.AuxInt = int64ToAuxInt(kind)
  8902  		v.AddArg3(x, y, mem)
  8903  		return true
  8904  	}
  8905  	// match: (PanicBounds [kind] x y mem)
  8906  	// cond: boundsABI(kind) == 1
  8907  	// result: (LoweredPanicBoundsB [kind] x y mem)
  8908  	for {
  8909  		kind := auxIntToInt64(v.AuxInt)
  8910  		x := v_0
  8911  		y := v_1
  8912  		mem := v_2
  8913  		if !(boundsABI(kind) == 1) {
  8914  			break
  8915  		}
  8916  		v.reset(OpLOONG64LoweredPanicBoundsB)
  8917  		v.AuxInt = int64ToAuxInt(kind)
  8918  		v.AddArg3(x, y, mem)
  8919  		return true
  8920  	}
  8921  	// match: (PanicBounds [kind] x y mem)
  8922  	// cond: boundsABI(kind) == 2
  8923  	// result: (LoweredPanicBoundsC [kind] x y mem)
  8924  	for {
  8925  		kind := auxIntToInt64(v.AuxInt)
  8926  		x := v_0
  8927  		y := v_1
  8928  		mem := v_2
  8929  		if !(boundsABI(kind) == 2) {
  8930  			break
  8931  		}
  8932  		v.reset(OpLOONG64LoweredPanicBoundsC)
  8933  		v.AuxInt = int64ToAuxInt(kind)
  8934  		v.AddArg3(x, y, mem)
  8935  		return true
  8936  	}
  8937  	return false
  8938  }
  8939  func rewriteValueLOONG64_OpPopCount16(v *Value) bool {
  8940  	v_0 := v.Args[0]
  8941  	b := v.Block
  8942  	typ := &b.Func.Config.Types
  8943  	// match: (PopCount16 <t> x)
  8944  	// result: (MOVWfpgp <t> (VPCNT16 <typ.Float32> (MOVWgpfp <typ.Float32> (ZeroExt16to32 x))))
  8945  	for {
  8946  		t := v.Type
  8947  		x := v_0
  8948  		v.reset(OpLOONG64MOVWfpgp)
  8949  		v.Type = t
  8950  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT16, typ.Float32)
  8951  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
  8952  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8953  		v2.AddArg(x)
  8954  		v1.AddArg(v2)
  8955  		v0.AddArg(v1)
  8956  		v.AddArg(v0)
  8957  		return true
  8958  	}
  8959  }
  8960  func rewriteValueLOONG64_OpPopCount32(v *Value) bool {
  8961  	v_0 := v.Args[0]
  8962  	b := v.Block
  8963  	typ := &b.Func.Config.Types
  8964  	// match: (PopCount32 <t> x)
  8965  	// result: (MOVWfpgp <t> (VPCNT32 <typ.Float32> (MOVWgpfp <typ.Float32> x)))
  8966  	for {
  8967  		t := v.Type
  8968  		x := v_0
  8969  		v.reset(OpLOONG64MOVWfpgp)
  8970  		v.Type = t
  8971  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT32, typ.Float32)
  8972  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
  8973  		v1.AddArg(x)
  8974  		v0.AddArg(v1)
  8975  		v.AddArg(v0)
  8976  		return true
  8977  	}
  8978  }
  8979  func rewriteValueLOONG64_OpPopCount64(v *Value) bool {
  8980  	v_0 := v.Args[0]
  8981  	b := v.Block
  8982  	typ := &b.Func.Config.Types
  8983  	// match: (PopCount64 <t> x)
  8984  	// result: (MOVVfpgp <t> (VPCNT64 <typ.Float64> (MOVVgpfp <typ.Float64> x)))
  8985  	for {
  8986  		t := v.Type
  8987  		x := v_0
  8988  		v.reset(OpLOONG64MOVVfpgp)
  8989  		v.Type = t
  8990  		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT64, typ.Float64)
  8991  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVgpfp, typ.Float64)
  8992  		v1.AddArg(x)
  8993  		v0.AddArg(v1)
  8994  		v.AddArg(v0)
  8995  		return true
  8996  	}
  8997  }
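// Note (editorial commentary, not generated): the PopCount rules route the
// integer value through the vector/floating-point register file
// (MOVWgpfp/MOVVgpfp), apply the VPCNT* population-count operation, which
// appears to be available only on those registers, and move the result back
// with MOVWfpgp/MOVVfpgp.
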
  8998  func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
  8999  	v_1 := v.Args[1]
  9000  	v_0 := v.Args[0]
  9001  	b := v.Block
  9002  	typ := &b.Func.Config.Types
  9003  	// match: (RotateLeft16 <t> x (MOVVconst [c]))
  9004  	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
  9005  	for {
  9006  		t := v.Type
  9007  		x := v_0
  9008  		if v_1.Op != OpLOONG64MOVVconst {
  9009  			break
  9010  		}
  9011  		c := auxIntToInt64(v_1.AuxInt)
  9012  		v.reset(OpOr16)
  9013  		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
  9014  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9015  		v1.AuxInt = int64ToAuxInt(c & 15)
  9016  		v0.AddArg2(x, v1)
  9017  		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
  9018  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9019  		v3.AuxInt = int64ToAuxInt(-c & 15)
  9020  		v2.AddArg2(x, v3)
  9021  		v.AddArg2(v0, v2)
  9022  		return true
  9023  	}
  9024  	// match: (RotateLeft16 <t> x y)
  9025  	// result: (ROTR <t> (OR <typ.UInt32> (ZeroExt16to32 x) (SLLVconst <t> (ZeroExt16to32 x) [16])) (NEGV <typ.Int64> y))
  9026  	for {
  9027  		t := v.Type
  9028  		x := v_0
  9029  		y := v_1
  9030  		v.reset(OpLOONG64ROTR)
  9031  		v.Type = t
  9032  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt32)
  9033  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  9034  		v1.AddArg(x)
  9035  		v2 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
  9036  		v2.AuxInt = int64ToAuxInt(16)
  9037  		v2.AddArg(v1)
  9038  		v0.AddArg2(v1, v2)
  9039  		v3 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
  9040  		v3.AddArg(y)
  9041  		v.AddArg2(v0, v3)
  9042  		return true
  9043  	}
  9044  }
  9045  func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
  9046  	v_1 := v.Args[1]
  9047  	v_0 := v.Args[0]
  9048  	b := v.Block
  9049  	// match: (RotateLeft32 x y)
  9050  	// result: (ROTR x (NEGV <y.Type> y))
  9051  	for {
  9052  		x := v_0
  9053  		y := v_1
  9054  		v.reset(OpLOONG64ROTR)
  9055  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
  9056  		v0.AddArg(y)
  9057  		v.AddArg2(x, v0)
  9058  		return true
  9059  	}
  9060  }
  9061  func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
  9062  	v_1 := v.Args[1]
  9063  	v_0 := v.Args[0]
  9064  	b := v.Block
  9065  	// match: (RotateLeft64 x y)
  9066  	// result: (ROTRV x (NEGV <y.Type> y))
  9067  	for {
  9068  		x := v_0
  9069  		y := v_1
  9070  		v.reset(OpLOONG64ROTRV)
  9071  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
  9072  		v0.AddArg(y)
  9073  		v.AddArg2(x, v0)
  9074  		return true
  9075  	}
  9076  }
  9077  func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
  9078  	v_1 := v.Args[1]
  9079  	v_0 := v.Args[0]
  9080  	b := v.Block
  9081  	typ := &b.Func.Config.Types
  9082  	// match: (RotateLeft8 <t> x (MOVVconst [c]))
  9083  	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
  9084  	for {
  9085  		t := v.Type
  9086  		x := v_0
  9087  		if v_1.Op != OpLOONG64MOVVconst {
  9088  			break
  9089  		}
  9090  		c := auxIntToInt64(v_1.AuxInt)
  9091  		v.reset(OpOr8)
  9092  		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
  9093  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9094  		v1.AuxInt = int64ToAuxInt(c & 7)
  9095  		v0.AddArg2(x, v1)
  9096  		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
  9097  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9098  		v3.AuxInt = int64ToAuxInt(-c & 7)
  9099  		v2.AddArg2(x, v3)
  9100  		v.AddArg2(v0, v2)
  9101  		return true
  9102  	}
  9103  	// match: (RotateLeft8 <t> x y)
  9104  	// result: (OR <t> (SLLV <t> x (ANDconst <typ.Int64> [7] y)) (SRLV <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEGV <typ.Int64> y))))
  9105  	for {
  9106  		t := v.Type
  9107  		x := v_0
  9108  		y := v_1
  9109  		v.reset(OpLOONG64OR)
  9110  		v.Type = t
  9111  		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
  9112  		v1 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
  9113  		v1.AuxInt = int64ToAuxInt(7)
  9114  		v1.AddArg(y)
  9115  		v0.AddArg2(x, v1)
  9116  		v2 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9117  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9118  		v3.AddArg(x)
  9119  		v4 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
  9120  		v4.AuxInt = int64ToAuxInt(7)
  9121  		v5 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
  9122  		v5.AddArg(y)
  9123  		v4.AddArg(v5)
  9124  		v2.AddArg2(v3, v4)
  9125  		v.AddArg2(v0, v2)
  9126  		return true
  9127  	}
  9128  }
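// Unsigned right shifts: when the shift count is known to be in range
// (shiftIsBounded), a plain SRLV of the zero-extended operand is emitted. Otherwise
// the SRLV result is routed through MASKEQZ guarded by SGTU(64, count), which keeps
// the shifted value only while count < 64 and yields 0 for oversized counts, as the
// Go spec requires.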
  9129  func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
  9130  	v_1 := v.Args[1]
  9131  	v_0 := v.Args[0]
  9132  	b := v.Block
  9133  	typ := &b.Func.Config.Types
  9134  	// match: (Rsh16Ux16 x y)
  9135  	// cond: shiftIsBounded(v)
  9136  	// result: (SRLV (ZeroExt16to64 x) y)
  9137  	for {
  9138  		x := v_0
  9139  		y := v_1
  9140  		if !(shiftIsBounded(v)) {
  9141  			break
  9142  		}
  9143  		v.reset(OpLOONG64SRLV)
  9144  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9145  		v0.AddArg(x)
  9146  		v.AddArg2(v0, y)
  9147  		return true
  9148  	}
  9149  	// match: (Rsh16Ux16 <t> x y)
  9150  	// cond: !shiftIsBounded(v)
  9151  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  9152  	for {
  9153  		t := v.Type
  9154  		x := v_0
  9155  		y := v_1
  9156  		if !(!shiftIsBounded(v)) {
  9157  			break
  9158  		}
  9159  		v.reset(OpLOONG64MASKEQZ)
  9160  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9161  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9162  		v1.AddArg(x)
  9163  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9164  		v2.AddArg(y)
  9165  		v0.AddArg2(v1, v2)
  9166  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9167  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9168  		v4.AuxInt = int64ToAuxInt(64)
  9169  		v3.AddArg2(v4, v2)
  9170  		v.AddArg2(v0, v3)
  9171  		return true
  9172  	}
  9173  	return false
  9174  }
  9175  func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
  9176  	v_1 := v.Args[1]
  9177  	v_0 := v.Args[0]
  9178  	b := v.Block
  9179  	typ := &b.Func.Config.Types
  9180  	// match: (Rsh16Ux32 x y)
  9181  	// cond: shiftIsBounded(v)
  9182  	// result: (SRLV (ZeroExt16to64 x) y)
  9183  	for {
  9184  		x := v_0
  9185  		y := v_1
  9186  		if !(shiftIsBounded(v)) {
  9187  			break
  9188  		}
  9189  		v.reset(OpLOONG64SRLV)
  9190  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9191  		v0.AddArg(x)
  9192  		v.AddArg2(v0, y)
  9193  		return true
  9194  	}
  9195  	// match: (Rsh16Ux32 <t> x y)
  9196  	// cond: !shiftIsBounded(v)
  9197  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  9198  	for {
  9199  		t := v.Type
  9200  		x := v_0
  9201  		y := v_1
  9202  		if !(!shiftIsBounded(v)) {
  9203  			break
  9204  		}
  9205  		v.reset(OpLOONG64MASKEQZ)
  9206  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9207  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9208  		v1.AddArg(x)
  9209  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9210  		v2.AddArg(y)
  9211  		v0.AddArg2(v1, v2)
  9212  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9213  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9214  		v4.AuxInt = int64ToAuxInt(64)
  9215  		v3.AddArg2(v4, v2)
  9216  		v.AddArg2(v0, v3)
  9217  		return true
  9218  	}
  9219  	return false
  9220  }
  9221  func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
  9222  	v_1 := v.Args[1]
  9223  	v_0 := v.Args[0]
  9224  	b := v.Block
  9225  	typ := &b.Func.Config.Types
  9226  	// match: (Rsh16Ux64 x y)
  9227  	// cond: shiftIsBounded(v)
  9228  	// result: (SRLV (ZeroExt16to64 x) y)
  9229  	for {
  9230  		x := v_0
  9231  		y := v_1
  9232  		if !(shiftIsBounded(v)) {
  9233  			break
  9234  		}
  9235  		v.reset(OpLOONG64SRLV)
  9236  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9237  		v0.AddArg(x)
  9238  		v.AddArg2(v0, y)
  9239  		return true
  9240  	}
  9241  	// match: (Rsh16Ux64 <t> x y)
  9242  	// cond: !shiftIsBounded(v)
  9243  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  9244  	for {
  9245  		t := v.Type
  9246  		x := v_0
  9247  		y := v_1
  9248  		if !(!shiftIsBounded(v)) {
  9249  			break
  9250  		}
  9251  		v.reset(OpLOONG64MASKEQZ)
  9252  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9253  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9254  		v1.AddArg(x)
  9255  		v0.AddArg2(v1, y)
  9256  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9257  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9258  		v3.AuxInt = int64ToAuxInt(64)
  9259  		v2.AddArg2(v3, y)
  9260  		v.AddArg2(v0, v2)
  9261  		return true
  9262  	}
  9263  	return false
  9264  }
  9265  func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
  9266  	v_1 := v.Args[1]
  9267  	v_0 := v.Args[0]
  9268  	b := v.Block
  9269  	typ := &b.Func.Config.Types
  9270  	// match: (Rsh16Ux8 x y)
  9271  	// cond: shiftIsBounded(v)
  9272  	// result: (SRLV (ZeroExt16to64 x) y)
  9273  	for {
  9274  		x := v_0
  9275  		y := v_1
  9276  		if !(shiftIsBounded(v)) {
  9277  			break
  9278  		}
  9279  		v.reset(OpLOONG64SRLV)
  9280  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9281  		v0.AddArg(x)
  9282  		v.AddArg2(v0, y)
  9283  		return true
  9284  	}
  9285  	// match: (Rsh16Ux8 <t> x y)
  9286  	// cond: !shiftIsBounded(v)
  9287  	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  9288  	for {
  9289  		t := v.Type
  9290  		x := v_0
  9291  		y := v_1
  9292  		if !(!shiftIsBounded(v)) {
  9293  			break
  9294  		}
  9295  		v.reset(OpLOONG64MASKEQZ)
  9296  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9297  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9298  		v1.AddArg(x)
  9299  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9300  		v2.AddArg(y)
  9301  		v0.AddArg2(v1, v2)
  9302  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9303  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9304  		v4.AuxInt = int64ToAuxInt(64)
  9305  		v3.AddArg2(v4, v2)
  9306  		v.AddArg2(v0, v3)
  9307  		return true
  9308  	}
  9309  	return false
  9310  }
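// Signed right shifts sign-extend the operand and use SRAV. For an unbounded count,
// NEGV(SGTU(count, 63)) is all ones exactly when count > 63; ORing that mask into the
// count clamps the effective shift amount to 63, so an oversized shift fills the
// result with copies of the sign bit.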
  9311  func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
  9312  	v_1 := v.Args[1]
  9313  	v_0 := v.Args[0]
  9314  	b := v.Block
  9315  	typ := &b.Func.Config.Types
  9316  	// match: (Rsh16x16 x y)
  9317  	// cond: shiftIsBounded(v)
  9318  	// result: (SRAV (SignExt16to64 x) y)
  9319  	for {
  9320  		x := v_0
  9321  		y := v_1
  9322  		if !(shiftIsBounded(v)) {
  9323  			break
  9324  		}
  9325  		v.reset(OpLOONG64SRAV)
  9326  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9327  		v0.AddArg(x)
  9328  		v.AddArg2(v0, y)
  9329  		return true
  9330  	}
  9331  	// match: (Rsh16x16 <t> x y)
  9332  	// cond: !shiftIsBounded(v)
  9333  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
  9334  	for {
  9335  		t := v.Type
  9336  		x := v_0
  9337  		y := v_1
  9338  		if !(!shiftIsBounded(v)) {
  9339  			break
  9340  		}
  9341  		v.reset(OpLOONG64SRAV)
  9342  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9343  		v0.AddArg(x)
  9344  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9345  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9346  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9347  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9348  		v4.AddArg(y)
  9349  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9350  		v5.AuxInt = int64ToAuxInt(63)
  9351  		v3.AddArg2(v4, v5)
  9352  		v2.AddArg(v3)
  9353  		v1.AddArg2(v2, v4)
  9354  		v.AddArg2(v0, v1)
  9355  		return true
  9356  	}
  9357  	return false
  9358  }
  9359  func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
  9360  	v_1 := v.Args[1]
  9361  	v_0 := v.Args[0]
  9362  	b := v.Block
  9363  	typ := &b.Func.Config.Types
  9364  	// match: (Rsh16x32 x y)
  9365  	// cond: shiftIsBounded(v)
  9366  	// result: (SRAV (SignExt16to64 x) y)
  9367  	for {
  9368  		x := v_0
  9369  		y := v_1
  9370  		if !(shiftIsBounded(v)) {
  9371  			break
  9372  		}
  9373  		v.reset(OpLOONG64SRAV)
  9374  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9375  		v0.AddArg(x)
  9376  		v.AddArg2(v0, y)
  9377  		return true
  9378  	}
  9379  	// match: (Rsh16x32 <t> x y)
  9380  	// cond: !shiftIsBounded(v)
  9381  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
  9382  	for {
  9383  		t := v.Type
  9384  		x := v_0
  9385  		y := v_1
  9386  		if !(!shiftIsBounded(v)) {
  9387  			break
  9388  		}
  9389  		v.reset(OpLOONG64SRAV)
  9390  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9391  		v0.AddArg(x)
  9392  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9393  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9394  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9395  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9396  		v4.AddArg(y)
  9397  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9398  		v5.AuxInt = int64ToAuxInt(63)
  9399  		v3.AddArg2(v4, v5)
  9400  		v2.AddArg(v3)
  9401  		v1.AddArg2(v2, v4)
  9402  		v.AddArg2(v0, v1)
  9403  		return true
  9404  	}
  9405  	return false
  9406  }
  9407  func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
  9408  	v_1 := v.Args[1]
  9409  	v_0 := v.Args[0]
  9410  	b := v.Block
  9411  	typ := &b.Func.Config.Types
  9412  	// match: (Rsh16x64 x y)
  9413  	// cond: shiftIsBounded(v)
  9414  	// result: (SRAV (SignExt16to64 x) y)
  9415  	for {
  9416  		x := v_0
  9417  		y := v_1
  9418  		if !(shiftIsBounded(v)) {
  9419  			break
  9420  		}
  9421  		v.reset(OpLOONG64SRAV)
  9422  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9423  		v0.AddArg(x)
  9424  		v.AddArg2(v0, y)
  9425  		return true
  9426  	}
  9427  	// match: (Rsh16x64 <t> x y)
  9428  	// cond: !shiftIsBounded(v)
  9429  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
  9430  	for {
  9431  		t := v.Type
  9432  		x := v_0
  9433  		y := v_1
  9434  		if !(!shiftIsBounded(v)) {
  9435  			break
  9436  		}
  9437  		v.reset(OpLOONG64SRAV)
  9438  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9439  		v0.AddArg(x)
  9440  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9441  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9442  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9443  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9444  		v4.AuxInt = int64ToAuxInt(63)
  9445  		v3.AddArg2(y, v4)
  9446  		v2.AddArg(v3)
  9447  		v1.AddArg2(v2, y)
  9448  		v.AddArg2(v0, v1)
  9449  		return true
  9450  	}
  9451  	return false
  9452  }
  9453  func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
  9454  	v_1 := v.Args[1]
  9455  	v_0 := v.Args[0]
  9456  	b := v.Block
  9457  	typ := &b.Func.Config.Types
  9458  	// match: (Rsh16x8 x y)
  9459  	// cond: shiftIsBounded(v)
  9460  	// result: (SRAV (SignExt16to64 x) y)
  9461  	for {
  9462  		x := v_0
  9463  		y := v_1
  9464  		if !(shiftIsBounded(v)) {
  9465  			break
  9466  		}
  9467  		v.reset(OpLOONG64SRAV)
  9468  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9469  		v0.AddArg(x)
  9470  		v.AddArg2(v0, y)
  9471  		return true
  9472  	}
  9473  	// match: (Rsh16x8 <t> x y)
  9474  	// cond: !shiftIsBounded(v)
  9475  	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
  9476  	for {
  9477  		t := v.Type
  9478  		x := v_0
  9479  		y := v_1
  9480  		if !(!shiftIsBounded(v)) {
  9481  			break
  9482  		}
  9483  		v.reset(OpLOONG64SRAV)
  9484  		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  9485  		v0.AddArg(x)
  9486  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9487  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9488  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9489  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9490  		v4.AddArg(y)
  9491  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9492  		v5.AuxInt = int64ToAuxInt(63)
  9493  		v3.AddArg2(v4, v5)
  9494  		v2.AddArg(v3)
  9495  		v1.AddArg2(v2, v4)
  9496  		v.AddArg2(v0, v1)
  9497  		return true
  9498  	}
  9499  	return false
  9500  }
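// The 32-bit shifts below use the word-sized SRL/SRA instructions directly, so the
// shifted operand needs no extension; the unbounded-count guard compares against 32
// (or clamps to 31 for the arithmetic variants) instead of 64.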
  9501  func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
  9502  	v_1 := v.Args[1]
  9503  	v_0 := v.Args[0]
  9504  	b := v.Block
  9505  	typ := &b.Func.Config.Types
  9506  	// match: (Rsh32Ux16 x y)
  9507  	// cond: shiftIsBounded(v)
  9508  	// result: (SRL x y)
  9509  	for {
  9510  		x := v_0
  9511  		y := v_1
  9512  		if !(shiftIsBounded(v)) {
  9513  			break
  9514  		}
  9515  		v.reset(OpLOONG64SRL)
  9516  		v.AddArg2(x, y)
  9517  		return true
  9518  	}
  9519  	// match: (Rsh32Ux16 <t> x y)
  9520  	// cond: !shiftIsBounded(v)
  9521  	// result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
  9522  	for {
  9523  		t := v.Type
  9524  		x := v_0
  9525  		y := v_1
  9526  		if !(!shiftIsBounded(v)) {
  9527  			break
  9528  		}
  9529  		v.reset(OpLOONG64MASKEQZ)
  9530  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9531  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9532  		v1.AddArg(y)
  9533  		v0.AddArg2(x, v1)
  9534  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9535  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9536  		v3.AuxInt = int64ToAuxInt(32)
  9537  		v2.AddArg2(v3, v1)
  9538  		v.AddArg2(v0, v2)
  9539  		return true
  9540  	}
  9541  	return false
  9542  }
  9543  func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
  9544  	v_1 := v.Args[1]
  9545  	v_0 := v.Args[0]
  9546  	b := v.Block
  9547  	typ := &b.Func.Config.Types
  9548  	// match: (Rsh32Ux32 x y)
  9549  	// cond: shiftIsBounded(v)
  9550  	// result: (SRL x y)
  9551  	for {
  9552  		x := v_0
  9553  		y := v_1
  9554  		if !(shiftIsBounded(v)) {
  9555  			break
  9556  		}
  9557  		v.reset(OpLOONG64SRL)
  9558  		v.AddArg2(x, y)
  9559  		return true
  9560  	}
  9561  	// match: (Rsh32Ux32 <t> x y)
  9562  	// cond: !shiftIsBounded(v)
  9563  	// result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
  9564  	for {
  9565  		t := v.Type
  9566  		x := v_0
  9567  		y := v_1
  9568  		if !(!shiftIsBounded(v)) {
  9569  			break
  9570  		}
  9571  		v.reset(OpLOONG64MASKEQZ)
  9572  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9573  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9574  		v1.AddArg(y)
  9575  		v0.AddArg2(x, v1)
  9576  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9577  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9578  		v3.AuxInt = int64ToAuxInt(32)
  9579  		v2.AddArg2(v3, v1)
  9580  		v.AddArg2(v0, v2)
  9581  		return true
  9582  	}
  9583  	return false
  9584  }
  9585  func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
  9586  	v_1 := v.Args[1]
  9587  	v_0 := v.Args[0]
  9588  	b := v.Block
  9589  	typ := &b.Func.Config.Types
  9590  	// match: (Rsh32Ux64 x y)
  9591  	// cond: shiftIsBounded(v)
  9592  	// result: (SRL x y)
  9593  	for {
  9594  		x := v_0
  9595  		y := v_1
  9596  		if !(shiftIsBounded(v)) {
  9597  			break
  9598  		}
  9599  		v.reset(OpLOONG64SRL)
  9600  		v.AddArg2(x, y)
  9601  		return true
  9602  	}
  9603  	// match: (Rsh32Ux64 <t> x y)
  9604  	// cond: !shiftIsBounded(v)
  9605  	// result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
  9606  	for {
  9607  		t := v.Type
  9608  		x := v_0
  9609  		y := v_1
  9610  		if !(!shiftIsBounded(v)) {
  9611  			break
  9612  		}
  9613  		v.reset(OpLOONG64MASKEQZ)
  9614  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9615  		v0.AddArg2(x, y)
  9616  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9617  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9618  		v2.AuxInt = int64ToAuxInt(32)
  9619  		v1.AddArg2(v2, y)
  9620  		v.AddArg2(v0, v1)
  9621  		return true
  9622  	}
  9623  	return false
  9624  }
  9625  func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
  9626  	v_1 := v.Args[1]
  9627  	v_0 := v.Args[0]
  9628  	b := v.Block
  9629  	typ := &b.Func.Config.Types
  9630  	// match: (Rsh32Ux8 x y)
  9631  	// cond: shiftIsBounded(v)
  9632  	// result: (SRL x y)
  9633  	for {
  9634  		x := v_0
  9635  		y := v_1
  9636  		if !(shiftIsBounded(v)) {
  9637  			break
  9638  		}
  9639  		v.reset(OpLOONG64SRL)
  9640  		v.AddArg2(x, y)
  9641  		return true
  9642  	}
  9643  	// match: (Rsh32Ux8 <t> x y)
  9644  	// cond: !shiftIsBounded(v)
  9645  	// result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
  9646  	for {
  9647  		t := v.Type
  9648  		x := v_0
  9649  		y := v_1
  9650  		if !(!shiftIsBounded(v)) {
  9651  			break
  9652  		}
  9653  		v.reset(OpLOONG64MASKEQZ)
  9654  		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
  9655  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9656  		v1.AddArg(y)
  9657  		v0.AddArg2(x, v1)
  9658  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9659  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9660  		v3.AuxInt = int64ToAuxInt(32)
  9661  		v2.AddArg2(v3, v1)
  9662  		v.AddArg2(v0, v2)
  9663  		return true
  9664  	}
  9665  	return false
  9666  }
  9667  func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
  9668  	v_1 := v.Args[1]
  9669  	v_0 := v.Args[0]
  9670  	b := v.Block
  9671  	typ := &b.Func.Config.Types
  9672  	// match: (Rsh32x16 x y)
  9673  	// cond: shiftIsBounded(v)
  9674  	// result: (SRA x y)
  9675  	for {
  9676  		x := v_0
  9677  		y := v_1
  9678  		if !(shiftIsBounded(v)) {
  9679  			break
  9680  		}
  9681  		v.reset(OpLOONG64SRA)
  9682  		v.AddArg2(x, y)
  9683  		return true
  9684  	}
  9685  	// match: (Rsh32x16 <t> x y)
  9686  	// cond: !shiftIsBounded(v)
  9687  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
  9688  	for {
  9689  		t := v.Type
  9690  		x := v_0
  9691  		y := v_1
  9692  		if !(!shiftIsBounded(v)) {
  9693  			break
  9694  		}
  9695  		v.reset(OpLOONG64SRA)
  9696  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9697  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9698  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9699  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9700  		v3.AddArg(y)
  9701  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9702  		v4.AuxInt = int64ToAuxInt(31)
  9703  		v2.AddArg2(v3, v4)
  9704  		v1.AddArg(v2)
  9705  		v0.AddArg2(v1, v3)
  9706  		v.AddArg2(x, v0)
  9707  		return true
  9708  	}
  9709  	return false
  9710  }
  9711  func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
  9712  	v_1 := v.Args[1]
  9713  	v_0 := v.Args[0]
  9714  	b := v.Block
  9715  	typ := &b.Func.Config.Types
  9716  	// match: (Rsh32x32 x y)
  9717  	// cond: shiftIsBounded(v)
  9718  	// result: (SRA x y)
  9719  	for {
  9720  		x := v_0
  9721  		y := v_1
  9722  		if !(shiftIsBounded(v)) {
  9723  			break
  9724  		}
  9725  		v.reset(OpLOONG64SRA)
  9726  		v.AddArg2(x, y)
  9727  		return true
  9728  	}
  9729  	// match: (Rsh32x32 <t> x y)
  9730  	// cond: !shiftIsBounded(v)
  9731  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
  9732  	for {
  9733  		t := v.Type
  9734  		x := v_0
  9735  		y := v_1
  9736  		if !(!shiftIsBounded(v)) {
  9737  			break
  9738  		}
  9739  		v.reset(OpLOONG64SRA)
  9740  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9741  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9742  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9743  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9744  		v3.AddArg(y)
  9745  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9746  		v4.AuxInt = int64ToAuxInt(31)
  9747  		v2.AddArg2(v3, v4)
  9748  		v1.AddArg(v2)
  9749  		v0.AddArg2(v1, v3)
  9750  		v.AddArg2(x, v0)
  9751  		return true
  9752  	}
  9753  	return false
  9754  }
  9755  func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
  9756  	v_1 := v.Args[1]
  9757  	v_0 := v.Args[0]
  9758  	b := v.Block
  9759  	typ := &b.Func.Config.Types
  9760  	// match: (Rsh32x64 x y)
  9761  	// cond: shiftIsBounded(v)
  9762  	// result: (SRA x y)
  9763  	for {
  9764  		x := v_0
  9765  		y := v_1
  9766  		if !(shiftIsBounded(v)) {
  9767  			break
  9768  		}
  9769  		v.reset(OpLOONG64SRA)
  9770  		v.AddArg2(x, y)
  9771  		return true
  9772  	}
  9773  	// match: (Rsh32x64 <t> x y)
  9774  	// cond: !shiftIsBounded(v)
  9775  	// result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
  9776  	for {
  9777  		t := v.Type
  9778  		x := v_0
  9779  		y := v_1
  9780  		if !(!shiftIsBounded(v)) {
  9781  			break
  9782  		}
  9783  		v.reset(OpLOONG64SRA)
  9784  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9785  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9786  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9787  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9788  		v3.AuxInt = int64ToAuxInt(31)
  9789  		v2.AddArg2(y, v3)
  9790  		v1.AddArg(v2)
  9791  		v0.AddArg2(v1, y)
  9792  		v.AddArg2(x, v0)
  9793  		return true
  9794  	}
  9795  	return false
  9796  }
  9797  func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
  9798  	v_1 := v.Args[1]
  9799  	v_0 := v.Args[0]
  9800  	b := v.Block
  9801  	typ := &b.Func.Config.Types
  9802  	// match: (Rsh32x8 x y)
  9803  	// cond: shiftIsBounded(v)
  9804  	// result: (SRA x y)
  9805  	for {
  9806  		x := v_0
  9807  		y := v_1
  9808  		if !(shiftIsBounded(v)) {
  9809  			break
  9810  		}
  9811  		v.reset(OpLOONG64SRA)
  9812  		v.AddArg2(x, y)
  9813  		return true
  9814  	}
  9815  	// match: (Rsh32x8 <t> x y)
  9816  	// cond: !shiftIsBounded(v)
  9817  	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
  9818  	for {
  9819  		t := v.Type
  9820  		x := v_0
  9821  		y := v_1
  9822  		if !(!shiftIsBounded(v)) {
  9823  			break
  9824  		}
  9825  		v.reset(OpLOONG64SRA)
  9826  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
  9827  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
  9828  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9829  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9830  		v3.AddArg(y)
  9831  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9832  		v4.AuxInt = int64ToAuxInt(31)
  9833  		v2.AddArg2(v3, v4)
  9834  		v1.AddArg(v2)
  9835  		v0.AddArg2(v1, v3)
  9836  		v.AddArg2(x, v0)
  9837  		return true
  9838  	}
  9839  	return false
  9840  }
  9841  func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
  9842  	v_1 := v.Args[1]
  9843  	v_0 := v.Args[0]
  9844  	b := v.Block
  9845  	typ := &b.Func.Config.Types
  9846  	// match: (Rsh64Ux16 x y)
  9847  	// cond: shiftIsBounded(v)
  9848  	// result: (SRLV x y)
  9849  	for {
  9850  		x := v_0
  9851  		y := v_1
  9852  		if !(shiftIsBounded(v)) {
  9853  			break
  9854  		}
  9855  		v.reset(OpLOONG64SRLV)
  9856  		v.AddArg2(x, y)
  9857  		return true
  9858  	}
  9859  	// match: (Rsh64Ux16 <t> x y)
  9860  	// cond: !shiftIsBounded(v)
  9861  	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
  9862  	for {
  9863  		t := v.Type
  9864  		x := v_0
  9865  		y := v_1
  9866  		if !(!shiftIsBounded(v)) {
  9867  			break
  9868  		}
  9869  		v.reset(OpLOONG64MASKEQZ)
  9870  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9871  		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  9872  		v1.AddArg(y)
  9873  		v0.AddArg2(x, v1)
  9874  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9875  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9876  		v3.AuxInt = int64ToAuxInt(64)
  9877  		v2.AddArg2(v3, v1)
  9878  		v.AddArg2(v0, v2)
  9879  		return true
  9880  	}
  9881  	return false
  9882  }
  9883  func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
  9884  	v_1 := v.Args[1]
  9885  	v_0 := v.Args[0]
  9886  	b := v.Block
  9887  	typ := &b.Func.Config.Types
  9888  	// match: (Rsh64Ux32 x y)
  9889  	// cond: shiftIsBounded(v)
  9890  	// result: (SRLV x y)
  9891  	for {
  9892  		x := v_0
  9893  		y := v_1
  9894  		if !(shiftIsBounded(v)) {
  9895  			break
  9896  		}
  9897  		v.reset(OpLOONG64SRLV)
  9898  		v.AddArg2(x, y)
  9899  		return true
  9900  	}
  9901  	// match: (Rsh64Ux32 <t> x y)
  9902  	// cond: !shiftIsBounded(v)
  9903  	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
  9904  	for {
  9905  		t := v.Type
  9906  		x := v_0
  9907  		y := v_1
  9908  		if !(!shiftIsBounded(v)) {
  9909  			break
  9910  		}
  9911  		v.reset(OpLOONG64MASKEQZ)
  9912  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9913  		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  9914  		v1.AddArg(y)
  9915  		v0.AddArg2(x, v1)
  9916  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9917  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9918  		v3.AuxInt = int64ToAuxInt(64)
  9919  		v2.AddArg2(v3, v1)
  9920  		v.AddArg2(v0, v2)
  9921  		return true
  9922  	}
  9923  	return false
  9924  }
  9925  func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
  9926  	v_1 := v.Args[1]
  9927  	v_0 := v.Args[0]
  9928  	b := v.Block
  9929  	typ := &b.Func.Config.Types
  9930  	// match: (Rsh64Ux64 x y)
  9931  	// cond: shiftIsBounded(v)
  9932  	// result: (SRLV x y)
  9933  	for {
  9934  		x := v_0
  9935  		y := v_1
  9936  		if !(shiftIsBounded(v)) {
  9937  			break
  9938  		}
  9939  		v.reset(OpLOONG64SRLV)
  9940  		v.AddArg2(x, y)
  9941  		return true
  9942  	}
  9943  	// match: (Rsh64Ux64 <t> x y)
  9944  	// cond: !shiftIsBounded(v)
  9945  	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
  9946  	for {
  9947  		t := v.Type
  9948  		x := v_0
  9949  		y := v_1
  9950  		if !(!shiftIsBounded(v)) {
  9951  			break
  9952  		}
  9953  		v.reset(OpLOONG64MASKEQZ)
  9954  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9955  		v0.AddArg2(x, y)
  9956  		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9957  		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
  9958  		v2.AuxInt = int64ToAuxInt(64)
  9959  		v1.AddArg2(v2, y)
  9960  		v.AddArg2(v0, v1)
  9961  		return true
  9962  	}
  9963  	return false
  9964  }
  9965  func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
  9966  	v_1 := v.Args[1]
  9967  	v_0 := v.Args[0]
  9968  	b := v.Block
  9969  	typ := &b.Func.Config.Types
  9970  	// match: (Rsh64Ux8 x y)
  9971  	// cond: shiftIsBounded(v)
  9972  	// result: (SRLV x y)
  9973  	for {
  9974  		x := v_0
  9975  		y := v_1
  9976  		if !(shiftIsBounded(v)) {
  9977  			break
  9978  		}
  9979  		v.reset(OpLOONG64SRLV)
  9980  		v.AddArg2(x, y)
  9981  		return true
  9982  	}
  9983  	// match: (Rsh64Ux8 <t> x y)
  9984  	// cond: !shiftIsBounded(v)
  9985  	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
  9986  	for {
  9987  		t := v.Type
  9988  		x := v_0
  9989  		y := v_1
  9990  		if !(!shiftIsBounded(v)) {
  9991  			break
  9992  		}
  9993  		v.reset(OpLOONG64MASKEQZ)
  9994  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
  9995  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  9996  		v1.AddArg(y)
  9997  		v0.AddArg2(x, v1)
  9998  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
  9999  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10000  		v3.AuxInt = int64ToAuxInt(64)
 10001  		v2.AddArg2(v3, v1)
 10002  		v.AddArg2(v0, v2)
 10003  		return true
 10004  	}
 10005  	return false
 10006  }
 10007  func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
 10008  	v_1 := v.Args[1]
 10009  	v_0 := v.Args[0]
 10010  	b := v.Block
 10011  	typ := &b.Func.Config.Types
 10012  	// match: (Rsh64x16 x y)
 10013  	// cond: shiftIsBounded(v)
 10014  	// result: (SRAV x y)
 10015  	for {
 10016  		x := v_0
 10017  		y := v_1
 10018  		if !(shiftIsBounded(v)) {
 10019  			break
 10020  		}
 10021  		v.reset(OpLOONG64SRAV)
 10022  		v.AddArg2(x, y)
 10023  		return true
 10024  	}
 10025  	// match: (Rsh64x16 <t> x y)
 10026  	// cond: !shiftIsBounded(v)
 10027  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
 10028  	for {
 10029  		t := v.Type
 10030  		x := v_0
 10031  		y := v_1
 10032  		if !(!shiftIsBounded(v)) {
 10033  			break
 10034  		}
 10035  		v.reset(OpLOONG64SRAV)
 10036  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10037  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10038  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10039  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10040  		v3.AddArg(y)
 10041  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10042  		v4.AuxInt = int64ToAuxInt(63)
 10043  		v2.AddArg2(v3, v4)
 10044  		v1.AddArg(v2)
 10045  		v0.AddArg2(v1, v3)
 10046  		v.AddArg2(x, v0)
 10047  		return true
 10048  	}
 10049  	return false
 10050  }
 10051  func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
 10052  	v_1 := v.Args[1]
 10053  	v_0 := v.Args[0]
 10054  	b := v.Block
 10055  	typ := &b.Func.Config.Types
 10056  	// match: (Rsh64x32 x y)
 10057  	// cond: shiftIsBounded(v)
 10058  	// result: (SRAV x y)
 10059  	for {
 10060  		x := v_0
 10061  		y := v_1
 10062  		if !(shiftIsBounded(v)) {
 10063  			break
 10064  		}
 10065  		v.reset(OpLOONG64SRAV)
 10066  		v.AddArg2(x, y)
 10067  		return true
 10068  	}
 10069  	// match: (Rsh64x32 <t> x y)
 10070  	// cond: !shiftIsBounded(v)
 10071  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
 10072  	for {
 10073  		t := v.Type
 10074  		x := v_0
 10075  		y := v_1
 10076  		if !(!shiftIsBounded(v)) {
 10077  			break
 10078  		}
 10079  		v.reset(OpLOONG64SRAV)
 10080  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10081  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10082  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10083  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10084  		v3.AddArg(y)
 10085  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10086  		v4.AuxInt = int64ToAuxInt(63)
 10087  		v2.AddArg2(v3, v4)
 10088  		v1.AddArg(v2)
 10089  		v0.AddArg2(v1, v3)
 10090  		v.AddArg2(x, v0)
 10091  		return true
 10092  	}
 10093  	return false
 10094  }
 10095  func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
 10096  	v_1 := v.Args[1]
 10097  	v_0 := v.Args[0]
 10098  	b := v.Block
 10099  	typ := &b.Func.Config.Types
 10100  	// match: (Rsh64x64 x y)
 10101  	// cond: shiftIsBounded(v)
 10102  	// result: (SRAV x y)
 10103  	for {
 10104  		x := v_0
 10105  		y := v_1
 10106  		if !(shiftIsBounded(v)) {
 10107  			break
 10108  		}
 10109  		v.reset(OpLOONG64SRAV)
 10110  		v.AddArg2(x, y)
 10111  		return true
 10112  	}
 10113  	// match: (Rsh64x64 <t> x y)
 10114  	// cond: !shiftIsBounded(v)
 10115  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
 10116  	for {
 10117  		t := v.Type
 10118  		x := v_0
 10119  		y := v_1
 10120  		if !(!shiftIsBounded(v)) {
 10121  			break
 10122  		}
 10123  		v.reset(OpLOONG64SRAV)
 10124  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10125  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10126  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10127  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10128  		v3.AuxInt = int64ToAuxInt(63)
 10129  		v2.AddArg2(y, v3)
 10130  		v1.AddArg(v2)
 10131  		v0.AddArg2(v1, y)
 10132  		v.AddArg2(x, v0)
 10133  		return true
 10134  	}
 10135  	return false
 10136  }
 10137  func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
 10138  	v_1 := v.Args[1]
 10139  	v_0 := v.Args[0]
 10140  	b := v.Block
 10141  	typ := &b.Func.Config.Types
 10142  	// match: (Rsh64x8 x y)
 10143  	// cond: shiftIsBounded(v)
 10144  	// result: (SRAV x y)
 10145  	for {
 10146  		x := v_0
 10147  		y := v_1
 10148  		if !(shiftIsBounded(v)) {
 10149  			break
 10150  		}
 10151  		v.reset(OpLOONG64SRAV)
 10152  		v.AddArg2(x, y)
 10153  		return true
 10154  	}
 10155  	// match: (Rsh64x8 <t> x y)
 10156  	// cond: !shiftIsBounded(v)
 10157  	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
 10158  	for {
 10159  		t := v.Type
 10160  		x := v_0
 10161  		y := v_1
 10162  		if !(!shiftIsBounded(v)) {
 10163  			break
 10164  		}
 10165  		v.reset(OpLOONG64SRAV)
 10166  		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10167  		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10168  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10169  		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10170  		v3.AddArg(y)
 10171  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10172  		v4.AuxInt = int64ToAuxInt(63)
 10173  		v2.AddArg2(v3, v4)
 10174  		v1.AddArg(v2)
 10175  		v0.AddArg2(v1, v3)
 10176  		v.AddArg2(x, v0)
 10177  		return true
 10178  	}
 10179  	return false
 10180  }
 10181  func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
 10182  	v_1 := v.Args[1]
 10183  	v_0 := v.Args[0]
 10184  	b := v.Block
 10185  	typ := &b.Func.Config.Types
 10186  	// match: (Rsh8Ux16 x y)
 10187  	// cond: shiftIsBounded(v)
 10188  	// result: (SRLV (ZeroExt8to64 x) y)
 10189  	for {
 10190  		x := v_0
 10191  		y := v_1
 10192  		if !(shiftIsBounded(v)) {
 10193  			break
 10194  		}
 10195  		v.reset(OpLOONG64SRLV)
 10196  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10197  		v0.AddArg(x)
 10198  		v.AddArg2(v0, y)
 10199  		return true
 10200  	}
 10201  	// match: (Rsh8Ux16 <t> x y)
 10202  	// cond: !shiftIsBounded(v)
 10203  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
 10204  	for {
 10205  		t := v.Type
 10206  		x := v_0
 10207  		y := v_1
 10208  		if !(!shiftIsBounded(v)) {
 10209  			break
 10210  		}
 10211  		v.reset(OpLOONG64MASKEQZ)
 10212  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10213  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10214  		v1.AddArg(x)
 10215  		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10216  		v2.AddArg(y)
 10217  		v0.AddArg2(v1, v2)
 10218  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10219  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10220  		v4.AuxInt = int64ToAuxInt(64)
 10221  		v3.AddArg2(v4, v2)
 10222  		v.AddArg2(v0, v3)
 10223  		return true
 10224  	}
 10225  	return false
 10226  }
 10227  func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
 10228  	v_1 := v.Args[1]
 10229  	v_0 := v.Args[0]
 10230  	b := v.Block
 10231  	typ := &b.Func.Config.Types
 10232  	// match: (Rsh8Ux32 x y)
 10233  	// cond: shiftIsBounded(v)
 10234  	// result: (SRLV (ZeroExt8to64 x) y)
 10235  	for {
 10236  		x := v_0
 10237  		y := v_1
 10238  		if !(shiftIsBounded(v)) {
 10239  			break
 10240  		}
 10241  		v.reset(OpLOONG64SRLV)
 10242  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10243  		v0.AddArg(x)
 10244  		v.AddArg2(v0, y)
 10245  		return true
 10246  	}
 10247  	// match: (Rsh8Ux32 <t> x y)
 10248  	// cond: !shiftIsBounded(v)
 10249  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
 10250  	for {
 10251  		t := v.Type
 10252  		x := v_0
 10253  		y := v_1
 10254  		if !(!shiftIsBounded(v)) {
 10255  			break
 10256  		}
 10257  		v.reset(OpLOONG64MASKEQZ)
 10258  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10259  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10260  		v1.AddArg(x)
 10261  		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10262  		v2.AddArg(y)
 10263  		v0.AddArg2(v1, v2)
 10264  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10265  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10266  		v4.AuxInt = int64ToAuxInt(64)
 10267  		v3.AddArg2(v4, v2)
 10268  		v.AddArg2(v0, v3)
 10269  		return true
 10270  	}
 10271  	return false
 10272  }
 10273  func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
 10274  	v_1 := v.Args[1]
 10275  	v_0 := v.Args[0]
 10276  	b := v.Block
 10277  	typ := &b.Func.Config.Types
 10278  	// match: (Rsh8Ux64 x y)
 10279  	// cond: shiftIsBounded(v)
 10280  	// result: (SRLV (ZeroExt8to64 x) y)
 10281  	for {
 10282  		x := v_0
 10283  		y := v_1
 10284  		if !(shiftIsBounded(v)) {
 10285  			break
 10286  		}
 10287  		v.reset(OpLOONG64SRLV)
 10288  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10289  		v0.AddArg(x)
 10290  		v.AddArg2(v0, y)
 10291  		return true
 10292  	}
 10293  	// match: (Rsh8Ux64 <t> x y)
 10294  	// cond: !shiftIsBounded(v)
 10295  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
 10296  	for {
 10297  		t := v.Type
 10298  		x := v_0
 10299  		y := v_1
 10300  		if !(!shiftIsBounded(v)) {
 10301  			break
 10302  		}
 10303  		v.reset(OpLOONG64MASKEQZ)
 10304  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10305  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10306  		v1.AddArg(x)
 10307  		v0.AddArg2(v1, y)
 10308  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10309  		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10310  		v3.AuxInt = int64ToAuxInt(64)
 10311  		v2.AddArg2(v3, y)
 10312  		v.AddArg2(v0, v2)
 10313  		return true
 10314  	}
 10315  	return false
 10316  }
 10317  func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
 10318  	v_1 := v.Args[1]
 10319  	v_0 := v.Args[0]
 10320  	b := v.Block
 10321  	typ := &b.Func.Config.Types
 10322  	// match: (Rsh8Ux8 x y)
 10323  	// cond: shiftIsBounded(v)
 10324  	// result: (SRLV (ZeroExt8to64 x) y)
 10325  	for {
 10326  		x := v_0
 10327  		y := v_1
 10328  		if !(shiftIsBounded(v)) {
 10329  			break
 10330  		}
 10331  		v.reset(OpLOONG64SRLV)
 10332  		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10333  		v0.AddArg(x)
 10334  		v.AddArg2(v0, y)
 10335  		return true
 10336  	}
 10337  	// match: (Rsh8Ux8 <t> x y)
 10338  	// cond: !shiftIsBounded(v)
 10339  	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
 10340  	for {
 10341  		t := v.Type
 10342  		x := v_0
 10343  		y := v_1
 10344  		if !(!shiftIsBounded(v)) {
 10345  			break
 10346  		}
 10347  		v.reset(OpLOONG64MASKEQZ)
 10348  		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
 10349  		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10350  		v1.AddArg(x)
 10351  		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10352  		v2.AddArg(y)
 10353  		v0.AddArg2(v1, v2)
 10354  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10355  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10356  		v4.AuxInt = int64ToAuxInt(64)
 10357  		v3.AddArg2(v4, v2)
 10358  		v.AddArg2(v0, v3)
 10359  		return true
 10360  	}
 10361  	return false
 10362  }
 10363  func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
 10364  	v_1 := v.Args[1]
 10365  	v_0 := v.Args[0]
 10366  	b := v.Block
 10367  	typ := &b.Func.Config.Types
 10368  	// match: (Rsh8x16 x y)
 10369  	// cond: shiftIsBounded(v)
 10370  	// result: (SRAV (SignExt8to64 x) y)
 10371  	for {
 10372  		x := v_0
 10373  		y := v_1
 10374  		if !(shiftIsBounded(v)) {
 10375  			break
 10376  		}
 10377  		v.reset(OpLOONG64SRAV)
 10378  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10379  		v0.AddArg(x)
 10380  		v.AddArg2(v0, y)
 10381  		return true
 10382  	}
 10383  	// match: (Rsh8x16 <t> x y)
 10384  	// cond: !shiftIsBounded(v)
 10385  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
 10386  	for {
 10387  		t := v.Type
 10388  		x := v_0
 10389  		y := v_1
 10390  		if !(!shiftIsBounded(v)) {
 10391  			break
 10392  		}
 10393  		v.reset(OpLOONG64SRAV)
 10394  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10395  		v0.AddArg(x)
 10396  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10397  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10398  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10399  		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
 10400  		v4.AddArg(y)
 10401  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10402  		v5.AuxInt = int64ToAuxInt(63)
 10403  		v3.AddArg2(v4, v5)
 10404  		v2.AddArg(v3)
 10405  		v1.AddArg2(v2, v4)
 10406  		v.AddArg2(v0, v1)
 10407  		return true
 10408  	}
 10409  	return false
 10410  }
 10411  func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
 10412  	v_1 := v.Args[1]
 10413  	v_0 := v.Args[0]
 10414  	b := v.Block
 10415  	typ := &b.Func.Config.Types
 10416  	// match: (Rsh8x32 x y)
 10417  	// cond: shiftIsBounded(v)
 10418  	// result: (SRAV (SignExt8to64 x) y)
 10419  	for {
 10420  		x := v_0
 10421  		y := v_1
 10422  		if !(shiftIsBounded(v)) {
 10423  			break
 10424  		}
 10425  		v.reset(OpLOONG64SRAV)
 10426  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10427  		v0.AddArg(x)
 10428  		v.AddArg2(v0, y)
 10429  		return true
 10430  	}
 10431  	// match: (Rsh8x32 <t> x y)
 10432  	// cond: !shiftIsBounded(v)
 10433  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
 10434  	for {
 10435  		t := v.Type
 10436  		x := v_0
 10437  		y := v_1
 10438  		if !(!shiftIsBounded(v)) {
 10439  			break
 10440  		}
 10441  		v.reset(OpLOONG64SRAV)
 10442  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10443  		v0.AddArg(x)
 10444  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10445  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10446  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10447  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
 10448  		v4.AddArg(y)
 10449  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10450  		v5.AuxInt = int64ToAuxInt(63)
 10451  		v3.AddArg2(v4, v5)
 10452  		v2.AddArg(v3)
 10453  		v1.AddArg2(v2, v4)
 10454  		v.AddArg2(v0, v1)
 10455  		return true
 10456  	}
 10457  	return false
 10458  }
 10459  func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
 10460  	v_1 := v.Args[1]
 10461  	v_0 := v.Args[0]
 10462  	b := v.Block
 10463  	typ := &b.Func.Config.Types
 10464  	// match: (Rsh8x64 x y)
 10465  	// cond: shiftIsBounded(v)
 10466  	// result: (SRAV (SignExt8to64 x) y)
 10467  	for {
 10468  		x := v_0
 10469  		y := v_1
 10470  		if !(shiftIsBounded(v)) {
 10471  			break
 10472  		}
 10473  		v.reset(OpLOONG64SRAV)
 10474  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10475  		v0.AddArg(x)
 10476  		v.AddArg2(v0, y)
 10477  		return true
 10478  	}
 10479  	// match: (Rsh8x64 <t> x y)
 10480  	// cond: !shiftIsBounded(v)
 10481  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
 10482  	for {
 10483  		t := v.Type
 10484  		x := v_0
 10485  		y := v_1
 10486  		if !(!shiftIsBounded(v)) {
 10487  			break
 10488  		}
 10489  		v.reset(OpLOONG64SRAV)
 10490  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10491  		v0.AddArg(x)
 10492  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10493  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10494  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10495  		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10496  		v4.AuxInt = int64ToAuxInt(63)
 10497  		v3.AddArg2(y, v4)
 10498  		v2.AddArg(v3)
 10499  		v1.AddArg2(v2, y)
 10500  		v.AddArg2(v0, v1)
 10501  		return true
 10502  	}
 10503  	return false
 10504  }
 10505  func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
 10506  	v_1 := v.Args[1]
 10507  	v_0 := v.Args[0]
 10508  	b := v.Block
 10509  	typ := &b.Func.Config.Types
 10510  	// match: (Rsh8x8 x y)
 10511  	// cond: shiftIsBounded(v)
 10512  	// result: (SRAV (SignExt8to64 x) y)
 10513  	for {
 10514  		x := v_0
 10515  		y := v_1
 10516  		if !(shiftIsBounded(v)) {
 10517  			break
 10518  		}
 10519  		v.reset(OpLOONG64SRAV)
 10520  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10521  		v0.AddArg(x)
 10522  		v.AddArg2(v0, y)
 10523  		return true
 10524  	}
 10525  	// match: (Rsh8x8 <t> x y)
 10526  	// cond: !shiftIsBounded(v)
 10527  	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
 10528  	for {
 10529  		t := v.Type
 10530  		x := v_0
 10531  		y := v_1
 10532  		if !(!shiftIsBounded(v)) {
 10533  			break
 10534  		}
 10535  		v.reset(OpLOONG64SRAV)
 10536  		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
 10537  		v0.AddArg(x)
 10538  		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
 10539  		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10540  		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
 10541  		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
 10542  		v4.AddArg(y)
 10543  		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10544  		v5.AuxInt = int64ToAuxInt(63)
 10545  		v3.AddArg2(v4, v5)
 10546  		v2.AddArg(v3)
 10547  		v1.AddArg2(v2, v4)
 10548  		v.AddArg2(v0, v1)
 10549  		return true
 10550  	}
 10551  	return false
 10552  }
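// Select0/Select1 split the multi-result generic ops: Mul64uhilo becomes a
// MULHVU/MULV pair, Mul64uover tests the high half against zero to form the overflow
// flag, and Add64carry/Sub64borrow compute the sum or difference with ADDV/SUBV while
// deriving the carry or borrow from unsigned comparisons (SGTU) of the partial results.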
 10553  func rewriteValueLOONG64_OpSelect0(v *Value) bool {
 10554  	v_0 := v.Args[0]
 10555  	b := v.Block
 10556  	// match: (Select0 (Mul64uhilo x y))
 10557  	// result: (MULHVU x y)
 10558  	for {
 10559  		if v_0.Op != OpMul64uhilo {
 10560  			break
 10561  		}
 10562  		y := v_0.Args[1]
 10563  		x := v_0.Args[0]
 10564  		v.reset(OpLOONG64MULHVU)
 10565  		v.AddArg2(x, y)
 10566  		return true
 10567  	}
 10568  	// match: (Select0 (Mul64uover x y))
 10569  	// result: (MULV x y)
 10570  	for {
 10571  		if v_0.Op != OpMul64uover {
 10572  			break
 10573  		}
 10574  		y := v_0.Args[1]
 10575  		x := v_0.Args[0]
 10576  		v.reset(OpLOONG64MULV)
 10577  		v.AddArg2(x, y)
 10578  		return true
 10579  	}
 10580  	// match: (Select0 <t> (Add64carry x y c))
 10581  	// result: (ADDV (ADDV <t> x y) c)
 10582  	for {
 10583  		t := v.Type
 10584  		if v_0.Op != OpAdd64carry {
 10585  			break
 10586  		}
 10587  		c := v_0.Args[2]
 10588  		x := v_0.Args[0]
 10589  		y := v_0.Args[1]
 10590  		v.reset(OpLOONG64ADDV)
 10591  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10592  		v0.AddArg2(x, y)
 10593  		v.AddArg2(v0, c)
 10594  		return true
 10595  	}
 10596  	// match: (Select0 <t> (Sub64borrow x y c))
 10597  	// result: (SUBV (SUBV <t> x y) c)
 10598  	for {
 10599  		t := v.Type
 10600  		if v_0.Op != OpSub64borrow {
 10601  			break
 10602  		}
 10603  		c := v_0.Args[2]
 10604  		x := v_0.Args[0]
 10605  		y := v_0.Args[1]
 10606  		v.reset(OpLOONG64SUBV)
 10607  		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10608  		v0.AddArg2(x, y)
 10609  		v.AddArg2(v0, c)
 10610  		return true
 10611  	}
 10612  	return false
 10613  }
 10614  func rewriteValueLOONG64_OpSelect1(v *Value) bool {
 10615  	v_0 := v.Args[0]
 10616  	b := v.Block
 10617  	typ := &b.Func.Config.Types
 10618  	// match: (Select1 (Mul64uhilo x y))
 10619  	// result: (MULV x y)
 10620  	for {
 10621  		if v_0.Op != OpMul64uhilo {
 10622  			break
 10623  		}
 10624  		y := v_0.Args[1]
 10625  		x := v_0.Args[0]
 10626  		v.reset(OpLOONG64MULV)
 10627  		v.AddArg2(x, y)
 10628  		return true
 10629  	}
 10630  	// match: (Select1 (Mul64uover x y))
 10631  	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
 10632  	for {
 10633  		if v_0.Op != OpMul64uover {
 10634  			break
 10635  		}
 10636  		y := v_0.Args[1]
 10637  		x := v_0.Args[0]
 10638  		v.reset(OpLOONG64SGTU)
 10639  		v.Type = typ.Bool
 10640  		v0 := b.NewValue0(v.Pos, OpLOONG64MULHVU, typ.UInt64)
 10641  		v0.AddArg2(x, y)
 10642  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10643  		v1.AuxInt = int64ToAuxInt(0)
 10644  		v.AddArg2(v0, v1)
 10645  		return true
 10646  	}
 10647  	// match: (Select1 <t> (Add64carry x y c))
 10648  	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
 10649  	for {
 10650  		t := v.Type
 10651  		if v_0.Op != OpAdd64carry {
 10652  			break
 10653  		}
 10654  		c := v_0.Args[2]
 10655  		x := v_0.Args[0]
 10656  		y := v_0.Args[1]
 10657  		v.reset(OpLOONG64OR)
 10658  		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10659  		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10660  		s.AddArg2(x, y)
 10661  		v0.AddArg2(x, s)
 10662  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10663  		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
 10664  		v3.AddArg2(s, c)
 10665  		v2.AddArg2(s, v3)
 10666  		v.AddArg2(v0, v2)
 10667  		return true
 10668  	}
 10669  	// match: (Select1 <t> (Sub64borrow x y c))
 10670  	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
 10671  	for {
 10672  		t := v.Type
 10673  		if v_0.Op != OpSub64borrow {
 10674  			break
 10675  		}
 10676  		c := v_0.Args[2]
 10677  		x := v_0.Args[0]
 10678  		y := v_0.Args[1]
 10679  		v.reset(OpLOONG64OR)
 10680  		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10681  		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10682  		s.AddArg2(x, y)
 10683  		v0.AddArg2(s, x)
 10684  		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
 10685  		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
 10686  		v3.AddArg2(s, c)
 10687  		v2.AddArg2(v3, s)
 10688  		v.AddArg2(v0, v2)
 10689  		return true
 10690  	}
 10691  	return false
 10692  }
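// SelectN [0] of a runtime.memmove CALLstatic whose only use is this result is
// rewritten into a Move op, so small memmoves with a known non-negative constant size
// can be inlined (subject to isInlinableMemmove).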
 10693  func rewriteValueLOONG64_OpSelectN(v *Value) bool {
 10694  	v_0 := v.Args[0]
 10695  	b := v.Block
 10696  	config := b.Func.Config
 10697  	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVVconst [sz]) mem))
 10698  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
 10699  	// result: (Move [sz] dst src mem)
 10700  	for {
 10701  		if auxIntToInt64(v.AuxInt) != 0 {
 10702  			break
 10703  		}
 10704  		call := v_0
 10705  		if call.Op != OpLOONG64CALLstatic || len(call.Args) != 4 {
 10706  			break
 10707  		}
 10708  		sym := auxToCall(call.Aux)
 10709  		mem := call.Args[3]
 10710  		dst := call.Args[0]
 10711  		src := call.Args[1]
 10712  		call_2 := call.Args[2]
 10713  		if call_2.Op != OpLOONG64MOVVconst {
 10714  			break
 10715  		}
 10716  		sz := auxIntToInt64(call_2.AuxInt)
 10717  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
 10718  			break
 10719  		}
 10720  		v.reset(OpMove)
 10721  		v.AuxInt = int64ToAuxInt(sz)
 10722  		v.AddArg3(dst, src, mem)
 10723  		return true
 10724  	}
 10725  	return false
 10726  }
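// Slicemask yields all ones for a nonzero length and zero otherwise: negating x sets
// the sign bit exactly when x != 0, and an arithmetic shift right by 63 broadcasts
// that bit across the word.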
 10727  func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
 10728  	v_0 := v.Args[0]
 10729  	b := v.Block
 10730  	// match: (Slicemask <t> x)
 10731  	// result: (SRAVconst (NEGV <t> x) [63])
 10732  	for {
 10733  		t := v.Type
 10734  		x := v_0
 10735  		v.reset(OpLOONG64SRAVconst)
 10736  		v.AuxInt = int64ToAuxInt(63)
 10737  		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
 10738  		v0.AddArg(x)
 10739  		v.AddArg(v0)
 10740  		return true
 10741  	}
 10742  }
 10743  func rewriteValueLOONG64_OpStore(v *Value) bool {
 10744  	v_2 := v.Args[2]
 10745  	v_1 := v.Args[1]
 10746  	v_0 := v.Args[0]
 10747  	// match: (Store {t} ptr val mem)
 10748  	// cond: t.Size() == 1
 10749  	// result: (MOVBstore ptr val mem)
 10750  	for {
 10751  		t := auxToType(v.Aux)
 10752  		ptr := v_0
 10753  		val := v_1
 10754  		mem := v_2
 10755  		if !(t.Size() == 1) {
 10756  			break
 10757  		}
 10758  		v.reset(OpLOONG64MOVBstore)
 10759  		v.AddArg3(ptr, val, mem)
 10760  		return true
 10761  	}
 10762  	// match: (Store {t} ptr val mem)
 10763  	// cond: t.Size() == 2
 10764  	// result: (MOVHstore ptr val mem)
 10765  	for {
 10766  		t := auxToType(v.Aux)
 10767  		ptr := v_0
 10768  		val := v_1
 10769  		mem := v_2
 10770  		if !(t.Size() == 2) {
 10771  			break
 10772  		}
 10773  		v.reset(OpLOONG64MOVHstore)
 10774  		v.AddArg3(ptr, val, mem)
 10775  		return true
 10776  	}
 10777  	// match: (Store {t} ptr val mem)
 10778  	// cond: t.Size() == 4 && !t.IsFloat()
 10779  	// result: (MOVWstore ptr val mem)
 10780  	for {
 10781  		t := auxToType(v.Aux)
 10782  		ptr := v_0
 10783  		val := v_1
 10784  		mem := v_2
 10785  		if !(t.Size() == 4 && !t.IsFloat()) {
 10786  			break
 10787  		}
 10788  		v.reset(OpLOONG64MOVWstore)
 10789  		v.AddArg3(ptr, val, mem)
 10790  		return true
 10791  	}
 10792  	// match: (Store {t} ptr val mem)
 10793  	// cond: t.Size() == 8 && !t.IsFloat()
 10794  	// result: (MOVVstore ptr val mem)
 10795  	for {
 10796  		t := auxToType(v.Aux)
 10797  		ptr := v_0
 10798  		val := v_1
 10799  		mem := v_2
 10800  		if !(t.Size() == 8 && !t.IsFloat()) {
 10801  			break
 10802  		}
 10803  		v.reset(OpLOONG64MOVVstore)
 10804  		v.AddArg3(ptr, val, mem)
 10805  		return true
 10806  	}
 10807  	// match: (Store {t} ptr val mem)
 10808  	// cond: t.Size() == 4 && t.IsFloat()
 10809  	// result: (MOVFstore ptr val mem)
 10810  	for {
 10811  		t := auxToType(v.Aux)
 10812  		ptr := v_0
 10813  		val := v_1
 10814  		mem := v_2
 10815  		if !(t.Size() == 4 && t.IsFloat()) {
 10816  			break
 10817  		}
 10818  		v.reset(OpLOONG64MOVFstore)
 10819  		v.AddArg3(ptr, val, mem)
 10820  		return true
 10821  	}
 10822  	// match: (Store {t} ptr val mem)
 10823  	// cond: t.Size() == 8 && t.IsFloat()
 10824  	// result: (MOVDstore ptr val mem)
 10825  	for {
 10826  		t := auxToType(v.Aux)
 10827  		ptr := v_0
 10828  		val := v_1
 10829  		mem := v_2
 10830  		if !(t.Size() == 8 && t.IsFloat()) {
 10831  			break
 10832  		}
 10833  		v.reset(OpLOONG64MOVDstore)
 10834  		v.AddArg3(ptr, val, mem)
 10835  		return true
 10836  	}
 10837  	return false
 10838  }
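// Zero of a small constant size is unrolled into explicit stores of a zero constant,
// using the widest stores the size allows and, where convenient, overlapping stores
// (for example, the [7] case issues two word stores at offsets 0 and 3).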
 10839  func rewriteValueLOONG64_OpZero(v *Value) bool {
 10840  	v_1 := v.Args[1]
 10841  	v_0 := v.Args[0]
 10842  	b := v.Block
 10843  	typ := &b.Func.Config.Types
 10844  	// match: (Zero [0] _ mem)
 10845  	// result: mem
 10846  	for {
 10847  		if auxIntToInt64(v.AuxInt) != 0 {
 10848  			break
 10849  		}
 10850  		mem := v_1
 10851  		v.copyOf(mem)
 10852  		return true
 10853  	}
 10854  	// match: (Zero [1] ptr mem)
 10855  	// result: (MOVBstore ptr (MOVVconst [0]) mem)
 10856  	for {
 10857  		if auxIntToInt64(v.AuxInt) != 1 {
 10858  			break
 10859  		}
 10860  		ptr := v_0
 10861  		mem := v_1
 10862  		v.reset(OpLOONG64MOVBstore)
 10863  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10864  		v0.AuxInt = int64ToAuxInt(0)
 10865  		v.AddArg3(ptr, v0, mem)
 10866  		return true
 10867  	}
 10868  	// match: (Zero [2] ptr mem)
 10869  	// result: (MOVHstore ptr (MOVVconst [0]) mem)
 10870  	for {
 10871  		if auxIntToInt64(v.AuxInt) != 2 {
 10872  			break
 10873  		}
 10874  		ptr := v_0
 10875  		mem := v_1
 10876  		v.reset(OpLOONG64MOVHstore)
 10877  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10878  		v0.AuxInt = int64ToAuxInt(0)
 10879  		v.AddArg3(ptr, v0, mem)
 10880  		return true
 10881  	}
 10882  	// match: (Zero [3] ptr mem)
 10883  	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVHstore ptr (MOVVconst [0]) mem))
 10884  	for {
 10885  		if auxIntToInt64(v.AuxInt) != 3 {
 10886  			break
 10887  		}
 10888  		ptr := v_0
 10889  		mem := v_1
 10890  		v.reset(OpLOONG64MOVBstore)
 10891  		v.AuxInt = int32ToAuxInt(2)
 10892  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10893  		v0.AuxInt = int64ToAuxInt(0)
 10894  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
 10895  		v1.AddArg3(ptr, v0, mem)
 10896  		v.AddArg3(ptr, v0, v1)
 10897  		return true
 10898  	}
 10899  	// match: (Zero [4] {t} ptr mem)
 10900  	// result: (MOVWstore ptr (MOVVconst [0]) mem)
 10901  	for {
 10902  		if auxIntToInt64(v.AuxInt) != 4 {
 10903  			break
 10904  		}
 10905  		ptr := v_0
 10906  		mem := v_1
 10907  		v.reset(OpLOONG64MOVWstore)
 10908  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10909  		v0.AuxInt = int64ToAuxInt(0)
 10910  		v.AddArg3(ptr, v0, mem)
 10911  		return true
 10912  	}
 10913  	// match: (Zero [5] ptr mem)
 10914  	// result: (MOVBstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 10915  	for {
 10916  		if auxIntToInt64(v.AuxInt) != 5 {
 10917  			break
 10918  		}
 10919  		ptr := v_0
 10920  		mem := v_1
 10921  		v.reset(OpLOONG64MOVBstore)
 10922  		v.AuxInt = int32ToAuxInt(4)
 10923  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10924  		v0.AuxInt = int64ToAuxInt(0)
 10925  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 10926  		v1.AddArg3(ptr, v0, mem)
 10927  		v.AddArg3(ptr, v0, v1)
 10928  		return true
 10929  	}
 10930  	// match: (Zero [6] ptr mem)
 10931  	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 10932  	for {
 10933  		if auxIntToInt64(v.AuxInt) != 6 {
 10934  			break
 10935  		}
 10936  		ptr := v_0
 10937  		mem := v_1
 10938  		v.reset(OpLOONG64MOVHstore)
 10939  		v.AuxInt = int32ToAuxInt(4)
 10940  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10941  		v0.AuxInt = int64ToAuxInt(0)
 10942  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 10943  		v1.AddArg3(ptr, v0, mem)
 10944  		v.AddArg3(ptr, v0, v1)
 10945  		return true
 10946  	}
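	// Hand-written note: several of the sizes below (7, 11, 13, 14, 15) are
	// zeroed with two overlapping stores; for example, Zero [7] writes 4
	// bytes at offset 3 and 4 bytes at offset 0, covering bytes 0..6 without
	// needing a byte store.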
 10947  	// match: (Zero [7] ptr mem)
 10948  	// result: (MOVWstore [3] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
 10949  	for {
 10950  		if auxIntToInt64(v.AuxInt) != 7 {
 10951  			break
 10952  		}
 10953  		ptr := v_0
 10954  		mem := v_1
 10955  		v.reset(OpLOONG64MOVWstore)
 10956  		v.AuxInt = int32ToAuxInt(3)
 10957  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10958  		v0.AuxInt = int64ToAuxInt(0)
 10959  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
 10960  		v1.AddArg3(ptr, v0, mem)
 10961  		v.AddArg3(ptr, v0, v1)
 10962  		return true
 10963  	}
 10964  	// match: (Zero [8] {t} ptr mem)
 10965  	// result: (MOVVstore ptr (MOVVconst [0]) mem)
 10966  	for {
 10967  		if auxIntToInt64(v.AuxInt) != 8 {
 10968  			break
 10969  		}
 10970  		ptr := v_0
 10971  		mem := v_1
 10972  		v.reset(OpLOONG64MOVVstore)
 10973  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10974  		v0.AuxInt = int64ToAuxInt(0)
 10975  		v.AddArg3(ptr, v0, mem)
 10976  		return true
 10977  	}
 10978  	// match: (Zero [9] ptr mem)
 10979  	// result: (MOVBstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 10980  	for {
 10981  		if auxIntToInt64(v.AuxInt) != 9 {
 10982  			break
 10983  		}
 10984  		ptr := v_0
 10985  		mem := v_1
 10986  		v.reset(OpLOONG64MOVBstore)
 10987  		v.AuxInt = int32ToAuxInt(8)
 10988  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 10989  		v0.AuxInt = int64ToAuxInt(0)
 10990  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 10991  		v1.AddArg3(ptr, v0, mem)
 10992  		v.AddArg3(ptr, v0, v1)
 10993  		return true
 10994  	}
 10995  	// match: (Zero [10] ptr mem)
 10996  	// result: (MOVHstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 10997  	for {
 10998  		if auxIntToInt64(v.AuxInt) != 10 {
 10999  			break
 11000  		}
 11001  		ptr := v_0
 11002  		mem := v_1
 11003  		v.reset(OpLOONG64MOVHstore)
 11004  		v.AuxInt = int32ToAuxInt(8)
 11005  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11006  		v0.AuxInt = int64ToAuxInt(0)
 11007  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11008  		v1.AddArg3(ptr, v0, mem)
 11009  		v.AddArg3(ptr, v0, v1)
 11010  		return true
 11011  	}
 11012  	// match: (Zero [11] ptr mem)
 11013  	// result: (MOVWstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11014  	for {
 11015  		if auxIntToInt64(v.AuxInt) != 11 {
 11016  			break
 11017  		}
 11018  		ptr := v_0
 11019  		mem := v_1
 11020  		v.reset(OpLOONG64MOVWstore)
 11021  		v.AuxInt = int32ToAuxInt(7)
 11022  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11023  		v0.AuxInt = int64ToAuxInt(0)
 11024  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11025  		v1.AddArg3(ptr, v0, mem)
 11026  		v.AddArg3(ptr, v0, v1)
 11027  		return true
 11028  	}
 11029  	// match: (Zero [12] ptr mem)
 11030  	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11031  	for {
 11032  		if auxIntToInt64(v.AuxInt) != 12 {
 11033  			break
 11034  		}
 11035  		ptr := v_0
 11036  		mem := v_1
 11037  		v.reset(OpLOONG64MOVWstore)
 11038  		v.AuxInt = int32ToAuxInt(8)
 11039  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11040  		v0.AuxInt = int64ToAuxInt(0)
 11041  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11042  		v1.AddArg3(ptr, v0, mem)
 11043  		v.AddArg3(ptr, v0, v1)
 11044  		return true
 11045  	}
 11046  	// match: (Zero [13] ptr mem)
 11047  	// result: (MOVVstore [5] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11048  	for {
 11049  		if auxIntToInt64(v.AuxInt) != 13 {
 11050  			break
 11051  		}
 11052  		ptr := v_0
 11053  		mem := v_1
 11054  		v.reset(OpLOONG64MOVVstore)
 11055  		v.AuxInt = int32ToAuxInt(5)
 11056  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11057  		v0.AuxInt = int64ToAuxInt(0)
 11058  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11059  		v1.AddArg3(ptr, v0, mem)
 11060  		v.AddArg3(ptr, v0, v1)
 11061  		return true
 11062  	}
 11063  	// match: (Zero [14] ptr mem)
 11064  	// result: (MOVVstore [6] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11065  	for {
 11066  		if auxIntToInt64(v.AuxInt) != 14 {
 11067  			break
 11068  		}
 11069  		ptr := v_0
 11070  		mem := v_1
 11071  		v.reset(OpLOONG64MOVVstore)
 11072  		v.AuxInt = int32ToAuxInt(6)
 11073  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11074  		v0.AuxInt = int64ToAuxInt(0)
 11075  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11076  		v1.AddArg3(ptr, v0, mem)
 11077  		v.AddArg3(ptr, v0, v1)
 11078  		return true
 11079  	}
 11080  	// match: (Zero [15] ptr mem)
 11081  	// result: (MOVVstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11082  	for {
 11083  		if auxIntToInt64(v.AuxInt) != 15 {
 11084  			break
 11085  		}
 11086  		ptr := v_0
 11087  		mem := v_1
 11088  		v.reset(OpLOONG64MOVVstore)
 11089  		v.AuxInt = int32ToAuxInt(7)
 11090  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11091  		v0.AuxInt = int64ToAuxInt(0)
 11092  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11093  		v1.AddArg3(ptr, v0, mem)
 11094  		v.AddArg3(ptr, v0, v1)
 11095  		return true
 11096  	}
 11097  	// match: (Zero [16] ptr mem)
 11098  	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
 11099  	for {
 11100  		if auxIntToInt64(v.AuxInt) != 16 {
 11101  			break
 11102  		}
 11103  		ptr := v_0
 11104  		mem := v_1
 11105  		v.reset(OpLOONG64MOVVstore)
 11106  		v.AuxInt = int32ToAuxInt(8)
 11107  		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
 11108  		v0.AuxInt = int64ToAuxInt(0)
 11109  		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
 11110  		v1.AddArg3(ptr, v0, mem)
 11111  		v.AddArg3(ptr, v0, v1)
 11112  		return true
 11113  	}
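	// Hand-written summary: the rules below cover sizes above 16 bytes. A
	// size that is not a multiple of 8 first splits off its tail, mid-sized
	// multiples of 8 go through Duff's device (DUFFZERO), and sizes above
	// 8*128 bytes fall back to the LoweredZero loop.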
 11114  	// match: (Zero [s] ptr mem)
 11115  	// cond: s%8 != 0 && s > 16
 11116  	// result: (Zero [s%8] (OffPtr <ptr.Type> ptr [s-s%8]) (Zero [s-s%8] ptr mem))
 11117  	for {
 11118  		s := auxIntToInt64(v.AuxInt)
 11119  		ptr := v_0
 11120  		mem := v_1
 11121  		if !(s%8 != 0 && s > 16) {
 11122  			break
 11123  		}
 11124  		v.reset(OpZero)
 11125  		v.AuxInt = int64ToAuxInt(s % 8)
 11126  		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
 11127  		v0.AuxInt = int64ToAuxInt(s - s%8)
 11128  		v0.AddArg(ptr)
 11129  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 11130  		v1.AuxInt = int64ToAuxInt(s - s%8)
 11131  		v1.AddArg2(ptr, mem)
 11132  		v.AddArg2(v0, v1)
 11133  		return true
 11134  	}
 11135  	// match: (Zero [s] ptr mem)
 11136  	// cond: s%8 == 0 && s > 16 && s <= 8*128
 11137  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
 11138  	for {
 11139  		s := auxIntToInt64(v.AuxInt)
 11140  		ptr := v_0
 11141  		mem := v_1
 11142  		if !(s%8 == 0 && s > 16 && s <= 8*128) {
 11143  			break
 11144  		}
 11145  		v.reset(OpLOONG64DUFFZERO)
 11146  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
 11147  		v.AddArg2(ptr, mem)
 11148  		return true
 11149  	}
 11150  	// match: (Zero [s] ptr mem)
 11151  	// cond: s%8 == 0 && s > 8*128
 11152  	// result: (LoweredZero ptr (ADDVconst <ptr.Type> ptr [s-8]) mem)
 11153  	for {
 11154  		s := auxIntToInt64(v.AuxInt)
 11155  		ptr := v_0
 11156  		mem := v_1
 11157  		if !(s%8 == 0 && s > 8*128) {
 11158  			break
 11159  		}
 11160  		v.reset(OpLOONG64LoweredZero)
 11161  		v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, ptr.Type)
 11162  		v0.AuxInt = int64ToAuxInt(s - 8)
 11163  		v0.AddArg(ptr)
 11164  		v.AddArg3(ptr, v0, mem)
 11165  		return true
 11166  	}
 11167  	return false
 11168  }
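// Hand-written illustrative note (not generated): in the Duff's device case
// above, the DUFFZERO AuxInt chooses how far into the duffzero routine
// execution starts, so that only the last s/8 of its 128 eight-byte zeroing
// steps run. The arithmetic, restated with a hypothetical helper:
func duffZeroAuxSketch(s int64) int64 {
	// Valid for s%8 == 0 && 16 < s && s <= 8*128, per the rule's condition;
	// e.g. s = 1024 yields 0, i.e. run all 128 steps.
	return 8 * (128 - s/8)
}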
 11169  func rewriteBlockLOONG64(b *Block) bool {
 11170  	typ := &b.Func.Config.Types
 11171  	switch b.Kind {
 11172  	case BlockLOONG64EQ:
 11173  		// match: (EQ (FPFlagTrue cmp) yes no)
 11174  		// result: (FPF cmp yes no)
 11175  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
 11176  			v_0 := b.Controls[0]
 11177  			cmp := v_0.Args[0]
 11178  			b.resetWithControl(BlockLOONG64FPF, cmp)
 11179  			return true
 11180  		}
 11181  		// match: (EQ (FPFlagFalse cmp) yes no)
 11182  		// result: (FPT cmp yes no)
 11183  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
 11184  			v_0 := b.Controls[0]
 11185  			cmp := v_0.Args[0]
 11186  			b.resetWithControl(BlockLOONG64FPT, cmp)
 11187  			return true
 11188  		}
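		// Hand-written note: XORconst [1] applied to a 0/1 comparison result
		// is a boolean negation, so the next four rules branch on the
		// underlying comparison with the sense flipped from EQ to NE.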
 11189  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
 11190  		// result: (NE cmp yes no)
 11191  		for b.Controls[0].Op == OpLOONG64XORconst {
 11192  			v_0 := b.Controls[0]
 11193  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11194  				break
 11195  			}
 11196  			cmp := v_0.Args[0]
 11197  			if cmp.Op != OpLOONG64SGT {
 11198  				break
 11199  			}
 11200  			b.resetWithControl(BlockLOONG64NE, cmp)
 11201  			return true
 11202  		}
 11203  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
 11204  		// result: (NE cmp yes no)
 11205  		for b.Controls[0].Op == OpLOONG64XORconst {
 11206  			v_0 := b.Controls[0]
 11207  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11208  				break
 11209  			}
 11210  			cmp := v_0.Args[0]
 11211  			if cmp.Op != OpLOONG64SGTU {
 11212  				break
 11213  			}
 11214  			b.resetWithControl(BlockLOONG64NE, cmp)
 11215  			return true
 11216  		}
 11217  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
 11218  		// result: (NE cmp yes no)
 11219  		for b.Controls[0].Op == OpLOONG64XORconst {
 11220  			v_0 := b.Controls[0]
 11221  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11222  				break
 11223  			}
 11224  			cmp := v_0.Args[0]
 11225  			if cmp.Op != OpLOONG64SGTconst {
 11226  				break
 11227  			}
 11228  			b.resetWithControl(BlockLOONG64NE, cmp)
 11229  			return true
 11230  		}
 11231  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
 11232  		// result: (NE cmp yes no)
 11233  		for b.Controls[0].Op == OpLOONG64XORconst {
 11234  			v_0 := b.Controls[0]
 11235  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11236  				break
 11237  			}
 11238  			cmp := v_0.Args[0]
 11239  			if cmp.Op != OpLOONG64SGTUconst {
 11240  				break
 11241  			}
 11242  			b.resetWithControl(BlockLOONG64NE, cmp)
 11243  			return true
 11244  		}
 11245  		// match: (EQ (SGTUconst [1] x) yes no)
 11246  		// result: (NE x yes no)
 11247  		for b.Controls[0].Op == OpLOONG64SGTUconst {
 11248  			v_0 := b.Controls[0]
 11249  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11250  				break
 11251  			}
 11252  			x := v_0.Args[0]
 11253  			b.resetWithControl(BlockLOONG64NE, x)
 11254  			return true
 11255  		}
 11256  		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
 11257  		// result: (EQ x yes no)
 11258  		for b.Controls[0].Op == OpLOONG64SGTU {
 11259  			v_0 := b.Controls[0]
 11260  			_ = v_0.Args[1]
 11261  			x := v_0.Args[0]
 11262  			v_0_1 := v_0.Args[1]
 11263  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11264  				break
 11265  			}
 11266  			b.resetWithControl(BlockLOONG64EQ, x)
 11267  			return true
 11268  		}
 11269  		// match: (EQ (SGTconst [0] x) yes no)
 11270  		// result: (GEZ x yes no)
 11271  		for b.Controls[0].Op == OpLOONG64SGTconst {
 11272  			v_0 := b.Controls[0]
 11273  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11274  				break
 11275  			}
 11276  			x := v_0.Args[0]
 11277  			b.resetWithControl(BlockLOONG64GEZ, x)
 11278  			return true
 11279  		}
 11280  		// match: (EQ (SGT x (MOVVconst [0])) yes no)
 11281  		// result: (LEZ x yes no)
 11282  		for b.Controls[0].Op == OpLOONG64SGT {
 11283  			v_0 := b.Controls[0]
 11284  			_ = v_0.Args[1]
 11285  			x := v_0.Args[0]
 11286  			v_0_1 := v_0.Args[1]
 11287  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11288  				break
 11289  			}
 11290  			b.resetWithControl(BlockLOONG64LEZ, x)
 11291  			return true
 11292  		}
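		// Hand-written note: the constant operand is folded into SGTUconst
		// only when it lies in -2048..2047, i.e. when it fits what is
		// presumably a signed 12-bit immediate field.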
 11293  		// match: (EQ (SGTU (MOVVconst [c]) y) yes no)
 11294  		// cond: c >= -2048 && c <= 2047
 11295  		// result: (EQ (SGTUconst [c] y) yes no)
 11296  		for b.Controls[0].Op == OpLOONG64SGTU {
 11297  			v_0 := b.Controls[0]
 11298  			y := v_0.Args[1]
 11299  			v_0_0 := v_0.Args[0]
 11300  			if v_0_0.Op != OpLOONG64MOVVconst {
 11301  				break
 11302  			}
 11303  			c := auxIntToInt64(v_0_0.AuxInt)
 11304  			if !(c >= -2048 && c <= 2047) {
 11305  				break
 11306  			}
 11307  			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
 11308  			v0.AuxInt = int64ToAuxInt(c)
 11309  			v0.AddArg(y)
 11310  			b.resetWithControl(BlockLOONG64EQ, v0)
 11311  			return true
 11312  		}
 11313  		// match: (EQ (SUBV x y) yes no)
 11314  		// result: (BEQ x y yes no)
 11315  		for b.Controls[0].Op == OpLOONG64SUBV {
 11316  			v_0 := b.Controls[0]
 11317  			y := v_0.Args[1]
 11318  			x := v_0.Args[0]
 11319  			b.resetWithControl2(BlockLOONG64BEQ, x, y)
 11320  			return true
 11321  		}
 11322  		// match: (EQ (SGT x y) yes no)
 11323  		// result: (BGE y x yes no)
 11324  		for b.Controls[0].Op == OpLOONG64SGT {
 11325  			v_0 := b.Controls[0]
 11326  			y := v_0.Args[1]
 11327  			x := v_0.Args[0]
 11328  			b.resetWithControl2(BlockLOONG64BGE, y, x)
 11329  			return true
 11330  		}
 11331  		// match: (EQ (SGTU x y) yes no)
 11332  		// result: (BGEU y x yes no)
 11333  		for b.Controls[0].Op == OpLOONG64SGTU {
 11334  			v_0 := b.Controls[0]
 11335  			y := v_0.Args[1]
 11336  			x := v_0.Args[0]
 11337  			b.resetWithControl2(BlockLOONG64BGEU, y, x)
 11338  			return true
 11339  		}
 11340  		// match: (EQ (MOVVconst [0]) yes no)
 11341  		// result: (First yes no)
 11342  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11343  			v_0 := b.Controls[0]
 11344  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11345  				break
 11346  			}
 11347  			b.Reset(BlockFirst)
 11348  			return true
 11349  		}
 11350  		// match: (EQ (MOVVconst [c]) yes no)
 11351  		// cond: c != 0
 11352  		// result: (First no yes)
 11353  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11354  			v_0 := b.Controls[0]
 11355  			c := auxIntToInt64(v_0.AuxInt)
 11356  			if !(c != 0) {
 11357  				break
 11358  			}
 11359  			b.Reset(BlockFirst)
 11360  			b.swapSuccessors()
 11361  			return true
 11362  		}
 11363  	case BlockLOONG64GEZ:
 11364  		// match: (GEZ (MOVVconst [c]) yes no)
 11365  		// cond: c >= 0
 11366  		// result: (First yes no)
 11367  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11368  			v_0 := b.Controls[0]
 11369  			c := auxIntToInt64(v_0.AuxInt)
 11370  			if !(c >= 0) {
 11371  				break
 11372  			}
 11373  			b.Reset(BlockFirst)
 11374  			return true
 11375  		}
 11376  		// match: (GEZ (MOVVconst [c]) yes no)
 11377  		// cond: c < 0
 11378  		// result: (First no yes)
 11379  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11380  			v_0 := b.Controls[0]
 11381  			c := auxIntToInt64(v_0.AuxInt)
 11382  			if !(c < 0) {
 11383  				break
 11384  			}
 11385  			b.Reset(BlockFirst)
 11386  			b.swapSuccessors()
 11387  			return true
 11388  		}
 11389  	case BlockLOONG64GTZ:
 11390  		// match: (GTZ (MOVVconst [c]) yes no)
 11391  		// cond: c > 0
 11392  		// result: (First yes no)
 11393  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11394  			v_0 := b.Controls[0]
 11395  			c := auxIntToInt64(v_0.AuxInt)
 11396  			if !(c > 0) {
 11397  				break
 11398  			}
 11399  			b.Reset(BlockFirst)
 11400  			return true
 11401  		}
 11402  		// match: (GTZ (MOVVconst [c]) yes no)
 11403  		// cond: c <= 0
 11404  		// result: (First no yes)
 11405  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11406  			v_0 := b.Controls[0]
 11407  			c := auxIntToInt64(v_0.AuxInt)
 11408  			if !(c <= 0) {
 11409  				break
 11410  			}
 11411  			b.Reset(BlockFirst)
 11412  			b.swapSuccessors()
 11413  			return true
 11414  		}
 11415  	case BlockIf:
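		// Hand-written note: a generic If block's control is a 1-byte boolean,
		// so the rule below zero-extends it to 64 bits with MOVBUreg and
		// branches on the result being non-zero (NE).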
 11416  		// match: (If cond yes no)
 11417  		// result: (NE (MOVBUreg <typ.UInt64> cond) yes no)
 11418  		for {
 11419  			cond := b.Controls[0]
 11420  			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
 11421  			v0.AddArg(cond)
 11422  			b.resetWithControl(BlockLOONG64NE, v0)
 11423  			return true
 11424  		}
 11425  	case BlockLOONG64LEZ:
 11426  		// match: (LEZ (MOVVconst [c]) yes no)
 11427  		// cond: c <= 0
 11428  		// result: (First yes no)
 11429  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11430  			v_0 := b.Controls[0]
 11431  			c := auxIntToInt64(v_0.AuxInt)
 11432  			if !(c <= 0) {
 11433  				break
 11434  			}
 11435  			b.Reset(BlockFirst)
 11436  			return true
 11437  		}
 11438  		// match: (LEZ (MOVVconst [c]) yes no)
 11439  		// cond: c > 0
 11440  		// result: (First no yes)
 11441  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11442  			v_0 := b.Controls[0]
 11443  			c := auxIntToInt64(v_0.AuxInt)
 11444  			if !(c > 0) {
 11445  				break
 11446  			}
 11447  			b.Reset(BlockFirst)
 11448  			b.swapSuccessors()
 11449  			return true
 11450  		}
 11451  	case BlockLOONG64LTZ:
 11452  		// match: (LTZ (MOVVconst [c]) yes no)
 11453  		// cond: c < 0
 11454  		// result: (First yes no)
 11455  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11456  			v_0 := b.Controls[0]
 11457  			c := auxIntToInt64(v_0.AuxInt)
 11458  			if !(c < 0) {
 11459  				break
 11460  			}
 11461  			b.Reset(BlockFirst)
 11462  			return true
 11463  		}
 11464  		// match: (LTZ (MOVVconst [c]) yes no)
 11465  		// cond: c >= 0
 11466  		// result: (First no yes)
 11467  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11468  			v_0 := b.Controls[0]
 11469  			c := auxIntToInt64(v_0.AuxInt)
 11470  			if !(c >= 0) {
 11471  				break
 11472  			}
 11473  			b.Reset(BlockFirst)
 11474  			b.swapSuccessors()
 11475  			return true
 11476  		}
 11477  	case BlockLOONG64NE:
 11478  		// match: (NE (FPFlagTrue cmp) yes no)
 11479  		// result: (FPT cmp yes no)
 11480  		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
 11481  			v_0 := b.Controls[0]
 11482  			cmp := v_0.Args[0]
 11483  			b.resetWithControl(BlockLOONG64FPT, cmp)
 11484  			return true
 11485  		}
 11486  		// match: (NE (FPFlagFalse cmp) yes no)
 11487  		// result: (FPF cmp yes no)
 11488  		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
 11489  			v_0 := b.Controls[0]
 11490  			cmp := v_0.Args[0]
 11491  			b.resetWithControl(BlockLOONG64FPF, cmp)
 11492  			return true
 11493  		}
 11494  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 11495  		// result: (EQ cmp yes no)
 11496  		for b.Controls[0].Op == OpLOONG64XORconst {
 11497  			v_0 := b.Controls[0]
 11498  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11499  				break
 11500  			}
 11501  			cmp := v_0.Args[0]
 11502  			if cmp.Op != OpLOONG64SGT {
 11503  				break
 11504  			}
 11505  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11506  			return true
 11507  		}
 11508  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 11509  		// result: (EQ cmp yes no)
 11510  		for b.Controls[0].Op == OpLOONG64XORconst {
 11511  			v_0 := b.Controls[0]
 11512  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11513  				break
 11514  			}
 11515  			cmp := v_0.Args[0]
 11516  			if cmp.Op != OpLOONG64SGTU {
 11517  				break
 11518  			}
 11519  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11520  			return true
 11521  		}
 11522  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 11523  		// result: (EQ cmp yes no)
 11524  		for b.Controls[0].Op == OpLOONG64XORconst {
 11525  			v_0 := b.Controls[0]
 11526  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11527  				break
 11528  			}
 11529  			cmp := v_0.Args[0]
 11530  			if cmp.Op != OpLOONG64SGTconst {
 11531  				break
 11532  			}
 11533  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11534  			return true
 11535  		}
 11536  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 11537  		// result: (EQ cmp yes no)
 11538  		for b.Controls[0].Op == OpLOONG64XORconst {
 11539  			v_0 := b.Controls[0]
 11540  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11541  				break
 11542  			}
 11543  			cmp := v_0.Args[0]
 11544  			if cmp.Op != OpLOONG64SGTUconst {
 11545  				break
 11546  			}
 11547  			b.resetWithControl(BlockLOONG64EQ, cmp)
 11548  			return true
 11549  		}
 11550  		// match: (NE (SGTUconst [1] x) yes no)
 11551  		// result: (EQ x yes no)
 11552  		for b.Controls[0].Op == OpLOONG64SGTUconst {
 11553  			v_0 := b.Controls[0]
 11554  			if auxIntToInt64(v_0.AuxInt) != 1 {
 11555  				break
 11556  			}
 11557  			x := v_0.Args[0]
 11558  			b.resetWithControl(BlockLOONG64EQ, x)
 11559  			return true
 11560  		}
 11561  		// match: (NE (SGTU x (MOVVconst [0])) yes no)
 11562  		// result: (NE x yes no)
 11563  		for b.Controls[0].Op == OpLOONG64SGTU {
 11564  			v_0 := b.Controls[0]
 11565  			_ = v_0.Args[1]
 11566  			x := v_0.Args[0]
 11567  			v_0_1 := v_0.Args[1]
 11568  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11569  				break
 11570  			}
 11571  			b.resetWithControl(BlockLOONG64NE, x)
 11572  			return true
 11573  		}
 11574  		// match: (NE (SGTconst [0] x) yes no)
 11575  		// result: (LTZ x yes no)
 11576  		for b.Controls[0].Op == OpLOONG64SGTconst {
 11577  			v_0 := b.Controls[0]
 11578  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11579  				break
 11580  			}
 11581  			x := v_0.Args[0]
 11582  			b.resetWithControl(BlockLOONG64LTZ, x)
 11583  			return true
 11584  		}
 11585  		// match: (NE (SGT x (MOVVconst [0])) yes no)
 11586  		// result: (GTZ x yes no)
 11587  		for b.Controls[0].Op == OpLOONG64SGT {
 11588  			v_0 := b.Controls[0]
 11589  			_ = v_0.Args[1]
 11590  			x := v_0.Args[0]
 11591  			v_0_1 := v_0.Args[1]
 11592  			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
 11593  				break
 11594  			}
 11595  			b.resetWithControl(BlockLOONG64GTZ, x)
 11596  			return true
 11597  		}
 11598  		// match: (NE (SGTU (MOVVconst [c]) y) yes no)
 11599  		// cond: c >= -2048 && c <= 2047
 11600  		// result: (NE (SGTUconst [c] y) yes no)
 11601  		for b.Controls[0].Op == OpLOONG64SGTU {
 11602  			v_0 := b.Controls[0]
 11603  			y := v_0.Args[1]
 11604  			v_0_0 := v_0.Args[0]
 11605  			if v_0_0.Op != OpLOONG64MOVVconst {
 11606  				break
 11607  			}
 11608  			c := auxIntToInt64(v_0_0.AuxInt)
 11609  			if !(c >= -2048 && c <= 2047) {
 11610  				break
 11611  			}
 11612  			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
 11613  			v0.AuxInt = int64ToAuxInt(c)
 11614  			v0.AddArg(y)
 11615  			b.resetWithControl(BlockLOONG64NE, v0)
 11616  			return true
 11617  		}
 11618  		// match: (NE (SUBV x y) yes no)
 11619  		// result: (BNE x y yes no)
 11620  		for b.Controls[0].Op == OpLOONG64SUBV {
 11621  			v_0 := b.Controls[0]
 11622  			y := v_0.Args[1]
 11623  			x := v_0.Args[0]
 11624  			b.resetWithControl2(BlockLOONG64BNE, x, y)
 11625  			return true
 11626  		}
 11627  		// match: (NE (SGT x y) yes no)
 11628  		// result: (BLT y x yes no)
 11629  		for b.Controls[0].Op == OpLOONG64SGT {
 11630  			v_0 := b.Controls[0]
 11631  			y := v_0.Args[1]
 11632  			x := v_0.Args[0]
 11633  			b.resetWithControl2(BlockLOONG64BLT, y, x)
 11634  			return true
 11635  		}
 11636  		// match: (NE (SGTU x y) yes no)
 11637  		// result: (BLTU y x yes no)
 11638  		for b.Controls[0].Op == OpLOONG64SGTU {
 11639  			v_0 := b.Controls[0]
 11640  			y := v_0.Args[1]
 11641  			x := v_0.Args[0]
 11642  			b.resetWithControl2(BlockLOONG64BLTU, y, x)
 11643  			return true
 11644  		}
 11645  		// match: (NE (MOVVconst [0]) yes no)
 11646  		// result: (First no yes)
 11647  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11648  			v_0 := b.Controls[0]
 11649  			if auxIntToInt64(v_0.AuxInt) != 0 {
 11650  				break
 11651  			}
 11652  			b.Reset(BlockFirst)
 11653  			b.swapSuccessors()
 11654  			return true
 11655  		}
 11656  		// match: (NE (MOVVconst [c]) yes no)
 11657  		// cond: c != 0
 11658  		// result: (First yes no)
 11659  		for b.Controls[0].Op == OpLOONG64MOVVconst {
 11660  			v_0 := b.Controls[0]
 11661  			c := auxIntToInt64(v_0.AuxInt)
 11662  			if !(c != 0) {
 11663  				break
 11664  			}
 11665  			b.Reset(BlockFirst)
 11666  			return true
 11667  		}
 11668  	}
 11669  	return false
 11670  }
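// Hand-written illustrative note (not generated): many of the block rules
// above fold a 0/1 comparison directly into a conditional branch using
// identities such as "(x > y) is false" being the same as "y >= x". A tiny
// sketch of the identity behind (EQ (SGT x y)) => (BGE y x), with a
// hypothetical name:
func sgtFoldIdentitySketch(x, y int64) bool {
	// Always true: branching on "x > y is false" equals branching on "y >= x".
	return !(x > y) == (y >= x)
}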
 11671  