Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "cmd/compile/internal/types"
     8  
        // rewriteValueRISCV64 is the generated lowering dispatch for the
        // RISCV64 backend. For each generic SSA op it either retargets the
        // value directly to a machine op (plain v.Op assignment, returns
        // true) or delegates to a per-op rewrite helper below. It reports
        // whether any rewrite rule applied to v. Generated from
        // _gen/RISCV64.rules — do not edit by hand.
     9  func rewriteValueRISCV64(v *Value) bool {
    10  	switch v.Op {
    11  	case OpAbs:
    12  		v.Op = OpRISCV64FABSD
    13  		return true
    14  	case OpAdd16:
    15  		v.Op = OpRISCV64ADD
    16  		return true
    17  	case OpAdd32:
    18  		v.Op = OpRISCV64ADD
    19  		return true
    20  	case OpAdd32F:
    21  		v.Op = OpRISCV64FADDS
    22  		return true
    23  	case OpAdd64:
    24  		v.Op = OpRISCV64ADD
    25  		return true
    26  	case OpAdd64F:
    27  		v.Op = OpRISCV64FADDD
    28  		return true
    29  	case OpAdd8:
    30  		v.Op = OpRISCV64ADD
    31  		return true
    32  	case OpAddPtr:
    33  		v.Op = OpRISCV64ADD
    34  		return true
    35  	case OpAddr:
    36  		return rewriteValueRISCV64_OpAddr(v)
    37  	case OpAnd16:
    38  		v.Op = OpRISCV64AND
    39  		return true
    40  	case OpAnd32:
    41  		v.Op = OpRISCV64AND
    42  		return true
    43  	case OpAnd64:
    44  		v.Op = OpRISCV64AND
    45  		return true
    46  	case OpAnd8:
    47  		v.Op = OpRISCV64AND
    48  		return true
    49  	case OpAndB:
    50  		v.Op = OpRISCV64AND
    51  		return true
    52  	case OpAtomicAdd32:
    53  		v.Op = OpRISCV64LoweredAtomicAdd32
    54  		return true
    55  	case OpAtomicAdd64:
    56  		v.Op = OpRISCV64LoweredAtomicAdd64
    57  		return true
    58  	case OpAtomicAnd32:
    59  		v.Op = OpRISCV64LoweredAtomicAnd32
    60  		return true
    61  	case OpAtomicAnd8:
    62  		return rewriteValueRISCV64_OpAtomicAnd8(v)
    63  	case OpAtomicCompareAndSwap32:
    64  		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
    65  	case OpAtomicCompareAndSwap64:
    66  		v.Op = OpRISCV64LoweredAtomicCas64
    67  		return true
    68  	case OpAtomicExchange32:
    69  		v.Op = OpRISCV64LoweredAtomicExchange32
    70  		return true
    71  	case OpAtomicExchange64:
    72  		v.Op = OpRISCV64LoweredAtomicExchange64
    73  		return true
    74  	case OpAtomicLoad32:
    75  		v.Op = OpRISCV64LoweredAtomicLoad32
    76  		return true
    77  	case OpAtomicLoad64:
    78  		v.Op = OpRISCV64LoweredAtomicLoad64
    79  		return true
    80  	case OpAtomicLoad8:
    81  		v.Op = OpRISCV64LoweredAtomicLoad8
    82  		return true
    83  	case OpAtomicLoadPtr:
    84  		v.Op = OpRISCV64LoweredAtomicLoad64
    85  		return true
    86  	case OpAtomicOr32:
    87  		v.Op = OpRISCV64LoweredAtomicOr32
    88  		return true
    89  	case OpAtomicOr8:
    90  		return rewriteValueRISCV64_OpAtomicOr8(v)
    91  	case OpAtomicStore32:
    92  		v.Op = OpRISCV64LoweredAtomicStore32
    93  		return true
    94  	case OpAtomicStore64:
    95  		v.Op = OpRISCV64LoweredAtomicStore64
    96  		return true
    97  	case OpAtomicStore8:
    98  		v.Op = OpRISCV64LoweredAtomicStore8
    99  		return true
   100  	case OpAtomicStorePtrNoWB:
   101  		v.Op = OpRISCV64LoweredAtomicStore64
   102  		return true
   103  	case OpAvg64u:
   104  		return rewriteValueRISCV64_OpAvg64u(v)
   105  	case OpBitLen16:
   106  		return rewriteValueRISCV64_OpBitLen16(v)
   107  	case OpBitLen32:
   108  		return rewriteValueRISCV64_OpBitLen32(v)
   109  	case OpBitLen64:
   110  		return rewriteValueRISCV64_OpBitLen64(v)
   111  	case OpBitLen8:
   112  		return rewriteValueRISCV64_OpBitLen8(v)
   113  	case OpClosureCall:
   114  		v.Op = OpRISCV64CALLclosure
   115  		return true
   116  	case OpCom16:
   117  		v.Op = OpRISCV64NOT
   118  		return true
   119  	case OpCom32:
   120  		v.Op = OpRISCV64NOT
   121  		return true
   122  	case OpCom64:
   123  		v.Op = OpRISCV64NOT
   124  		return true
   125  	case OpCom8:
   126  		v.Op = OpRISCV64NOT
   127  		return true
   128  	case OpConst16:
   129  		return rewriteValueRISCV64_OpConst16(v)
   130  	case OpConst32:
   131  		return rewriteValueRISCV64_OpConst32(v)
   132  	case OpConst32F:
   133  		return rewriteValueRISCV64_OpConst32F(v)
   134  	case OpConst64:
   135  		return rewriteValueRISCV64_OpConst64(v)
   136  	case OpConst64F:
   137  		return rewriteValueRISCV64_OpConst64F(v)
   138  	case OpConst8:
   139  		return rewriteValueRISCV64_OpConst8(v)
   140  	case OpConstBool:
   141  		return rewriteValueRISCV64_OpConstBool(v)
   142  	case OpConstNil:
   143  		return rewriteValueRISCV64_OpConstNil(v)
   144  	case OpCopysign:
   145  		v.Op = OpRISCV64FSGNJD
   146  		return true
   147  	case OpCtz16:
   148  		return rewriteValueRISCV64_OpCtz16(v)
   149  	case OpCtz16NonZero:
   150  		v.Op = OpCtz64
   151  		return true
   152  	case OpCtz32:
   153  		v.Op = OpRISCV64CTZW
   154  		return true
   155  	case OpCtz32NonZero:
   156  		v.Op = OpCtz64
   157  		return true
   158  	case OpCtz64:
   159  		v.Op = OpRISCV64CTZ
   160  		return true
   161  	case OpCtz64NonZero:
   162  		v.Op = OpCtz64
   163  		return true
   164  	case OpCtz8:
   165  		return rewriteValueRISCV64_OpCtz8(v)
   166  	case OpCtz8NonZero:
   167  		v.Op = OpCtz64
   168  		return true
   169  	case OpCvt32Fto32:
   170  		v.Op = OpRISCV64FCVTWS
   171  		return true
   172  	case OpCvt32Fto64:
   173  		v.Op = OpRISCV64FCVTLS
   174  		return true
   175  	case OpCvt32Fto64F:
   176  		v.Op = OpRISCV64FCVTDS
   177  		return true
   178  	case OpCvt32to32F:
   179  		v.Op = OpRISCV64FCVTSW
   180  		return true
   181  	case OpCvt32to64F:
   182  		v.Op = OpRISCV64FCVTDW
   183  		return true
   184  	case OpCvt64Fto32:
   185  		v.Op = OpRISCV64FCVTWD
   186  		return true
   187  	case OpCvt64Fto32F:
   188  		v.Op = OpRISCV64FCVTSD
   189  		return true
   190  	case OpCvt64Fto64:
   191  		v.Op = OpRISCV64FCVTLD
   192  		return true
   193  	case OpCvt64to32F:
   194  		v.Op = OpRISCV64FCVTSL
   195  		return true
   196  	case OpCvt64to64F:
   197  		v.Op = OpRISCV64FCVTDL
   198  		return true
   199  	case OpCvtBoolToUint8:
   200  		v.Op = OpCopy
   201  		return true
   202  	case OpDiv16:
   203  		return rewriteValueRISCV64_OpDiv16(v)
   204  	case OpDiv16u:
   205  		return rewriteValueRISCV64_OpDiv16u(v)
   206  	case OpDiv32:
   207  		return rewriteValueRISCV64_OpDiv32(v)
   208  	case OpDiv32F:
   209  		v.Op = OpRISCV64FDIVS
   210  		return true
   211  	case OpDiv32u:
   212  		v.Op = OpRISCV64DIVUW
   213  		return true
   214  	case OpDiv64:
   215  		return rewriteValueRISCV64_OpDiv64(v)
   216  	case OpDiv64F:
   217  		v.Op = OpRISCV64FDIVD
   218  		return true
   219  	case OpDiv64u:
   220  		v.Op = OpRISCV64DIVU
   221  		return true
   222  	case OpDiv8:
   223  		return rewriteValueRISCV64_OpDiv8(v)
   224  	case OpDiv8u:
   225  		return rewriteValueRISCV64_OpDiv8u(v)
   226  	case OpEq16:
   227  		return rewriteValueRISCV64_OpEq16(v)
   228  	case OpEq32:
   229  		return rewriteValueRISCV64_OpEq32(v)
   230  	case OpEq32F:
   231  		v.Op = OpRISCV64FEQS
   232  		return true
   233  	case OpEq64:
   234  		return rewriteValueRISCV64_OpEq64(v)
   235  	case OpEq64F:
   236  		v.Op = OpRISCV64FEQD
   237  		return true
   238  	case OpEq8:
   239  		return rewriteValueRISCV64_OpEq8(v)
   240  	case OpEqB:
   241  		return rewriteValueRISCV64_OpEqB(v)
   242  	case OpEqPtr:
   243  		return rewriteValueRISCV64_OpEqPtr(v)
   244  	case OpFMA:
   245  		v.Op = OpRISCV64FMADDD
   246  		return true
   247  	case OpGetCallerPC:
   248  		v.Op = OpRISCV64LoweredGetCallerPC
   249  		return true
   250  	case OpGetCallerSP:
   251  		v.Op = OpRISCV64LoweredGetCallerSP
   252  		return true
   253  	case OpGetClosurePtr:
   254  		v.Op = OpRISCV64LoweredGetClosurePtr
   255  		return true
   256  	case OpHmul32:
   257  		return rewriteValueRISCV64_OpHmul32(v)
   258  	case OpHmul32u:
   259  		return rewriteValueRISCV64_OpHmul32u(v)
   260  	case OpHmul64:
   261  		v.Op = OpRISCV64MULH
   262  		return true
   263  	case OpHmul64u:
   264  		v.Op = OpRISCV64MULHU
   265  		return true
   266  	case OpInterCall:
   267  		v.Op = OpRISCV64CALLinter
   268  		return true
   269  	case OpIsInBounds:
   270  		v.Op = OpLess64U
   271  		return true
   272  	case OpIsNonNil:
   273  		v.Op = OpRISCV64SNEZ
   274  		return true
   275  	case OpIsSliceInBounds:
   276  		v.Op = OpLeq64U
   277  		return true
   278  	case OpLeq16:
   279  		return rewriteValueRISCV64_OpLeq16(v)
   280  	case OpLeq16U:
   281  		return rewriteValueRISCV64_OpLeq16U(v)
   282  	case OpLeq32:
   283  		return rewriteValueRISCV64_OpLeq32(v)
   284  	case OpLeq32F:
   285  		v.Op = OpRISCV64FLES
   286  		return true
   287  	case OpLeq32U:
   288  		return rewriteValueRISCV64_OpLeq32U(v)
   289  	case OpLeq64:
   290  		return rewriteValueRISCV64_OpLeq64(v)
   291  	case OpLeq64F:
   292  		v.Op = OpRISCV64FLED
   293  		return true
   294  	case OpLeq64U:
   295  		return rewriteValueRISCV64_OpLeq64U(v)
   296  	case OpLeq8:
   297  		return rewriteValueRISCV64_OpLeq8(v)
   298  	case OpLeq8U:
   299  		return rewriteValueRISCV64_OpLeq8U(v)
   300  	case OpLess16:
   301  		return rewriteValueRISCV64_OpLess16(v)
   302  	case OpLess16U:
   303  		return rewriteValueRISCV64_OpLess16U(v)
   304  	case OpLess32:
   305  		return rewriteValueRISCV64_OpLess32(v)
   306  	case OpLess32F:
   307  		v.Op = OpRISCV64FLTS
   308  		return true
   309  	case OpLess32U:
   310  		return rewriteValueRISCV64_OpLess32U(v)
   311  	case OpLess64:
   312  		v.Op = OpRISCV64SLT
   313  		return true
   314  	case OpLess64F:
   315  		v.Op = OpRISCV64FLTD
   316  		return true
   317  	case OpLess64U:
   318  		v.Op = OpRISCV64SLTU
   319  		return true
   320  	case OpLess8:
   321  		return rewriteValueRISCV64_OpLess8(v)
   322  	case OpLess8U:
   323  		return rewriteValueRISCV64_OpLess8U(v)
   324  	case OpLoad:
   325  		return rewriteValueRISCV64_OpLoad(v)
   326  	case OpLocalAddr:
   327  		return rewriteValueRISCV64_OpLocalAddr(v)
   328  	case OpLsh16x16:
   329  		return rewriteValueRISCV64_OpLsh16x16(v)
   330  	case OpLsh16x32:
   331  		return rewriteValueRISCV64_OpLsh16x32(v)
   332  	case OpLsh16x64:
   333  		return rewriteValueRISCV64_OpLsh16x64(v)
   334  	case OpLsh16x8:
   335  		return rewriteValueRISCV64_OpLsh16x8(v)
   336  	case OpLsh32x16:
   337  		return rewriteValueRISCV64_OpLsh32x16(v)
   338  	case OpLsh32x32:
   339  		return rewriteValueRISCV64_OpLsh32x32(v)
   340  	case OpLsh32x64:
   341  		return rewriteValueRISCV64_OpLsh32x64(v)
   342  	case OpLsh32x8:
   343  		return rewriteValueRISCV64_OpLsh32x8(v)
   344  	case OpLsh64x16:
   345  		return rewriteValueRISCV64_OpLsh64x16(v)
   346  	case OpLsh64x32:
   347  		return rewriteValueRISCV64_OpLsh64x32(v)
   348  	case OpLsh64x64:
   349  		return rewriteValueRISCV64_OpLsh64x64(v)
   350  	case OpLsh64x8:
   351  		return rewriteValueRISCV64_OpLsh64x8(v)
   352  	case OpLsh8x16:
   353  		return rewriteValueRISCV64_OpLsh8x16(v)
   354  	case OpLsh8x32:
   355  		return rewriteValueRISCV64_OpLsh8x32(v)
   356  	case OpLsh8x64:
   357  		return rewriteValueRISCV64_OpLsh8x64(v)
   358  	case OpLsh8x8:
   359  		return rewriteValueRISCV64_OpLsh8x8(v)
   360  	case OpMax32F:
   361  		v.Op = OpRISCV64LoweredFMAXS
   362  		return true
   363  	case OpMax64:
   364  		return rewriteValueRISCV64_OpMax64(v)
   365  	case OpMax64F:
   366  		v.Op = OpRISCV64LoweredFMAXD
   367  		return true
   368  	case OpMax64u:
   369  		return rewriteValueRISCV64_OpMax64u(v)
   370  	case OpMin32F:
   371  		v.Op = OpRISCV64LoweredFMINS
   372  		return true
   373  	case OpMin64:
   374  		return rewriteValueRISCV64_OpMin64(v)
   375  	case OpMin64F:
   376  		v.Op = OpRISCV64LoweredFMIND
   377  		return true
   378  	case OpMin64u:
   379  		return rewriteValueRISCV64_OpMin64u(v)
   380  	case OpMod16:
   381  		return rewriteValueRISCV64_OpMod16(v)
   382  	case OpMod16u:
   383  		return rewriteValueRISCV64_OpMod16u(v)
   384  	case OpMod32:
   385  		return rewriteValueRISCV64_OpMod32(v)
   386  	case OpMod32u:
   387  		v.Op = OpRISCV64REMUW
   388  		return true
   389  	case OpMod64:
   390  		return rewriteValueRISCV64_OpMod64(v)
   391  	case OpMod64u:
   392  		v.Op = OpRISCV64REMU
   393  		return true
   394  	case OpMod8:
   395  		return rewriteValueRISCV64_OpMod8(v)
   396  	case OpMod8u:
   397  		return rewriteValueRISCV64_OpMod8u(v)
   398  	case OpMove:
   399  		return rewriteValueRISCV64_OpMove(v)
   400  	case OpMul16:
   401  		return rewriteValueRISCV64_OpMul16(v)
   402  	case OpMul32:
   403  		v.Op = OpRISCV64MULW
   404  		return true
   405  	case OpMul32F:
   406  		v.Op = OpRISCV64FMULS
   407  		return true
   408  	case OpMul64:
   409  		v.Op = OpRISCV64MUL
   410  		return true
   411  	case OpMul64F:
   412  		v.Op = OpRISCV64FMULD
   413  		return true
   414  	case OpMul64uhilo:
   415  		v.Op = OpRISCV64LoweredMuluhilo
   416  		return true
   417  	case OpMul64uover:
   418  		v.Op = OpRISCV64LoweredMuluover
   419  		return true
   420  	case OpMul8:
   421  		return rewriteValueRISCV64_OpMul8(v)
   422  	case OpNeg16:
   423  		v.Op = OpRISCV64NEG
   424  		return true
   425  	case OpNeg32:
   426  		v.Op = OpRISCV64NEG
   427  		return true
   428  	case OpNeg32F:
   429  		v.Op = OpRISCV64FNEGS
   430  		return true
   431  	case OpNeg64:
   432  		v.Op = OpRISCV64NEG
   433  		return true
   434  	case OpNeg64F:
   435  		v.Op = OpRISCV64FNEGD
   436  		return true
   437  	case OpNeg8:
   438  		v.Op = OpRISCV64NEG
   439  		return true
   440  	case OpNeq16:
   441  		return rewriteValueRISCV64_OpNeq16(v)
   442  	case OpNeq32:
   443  		return rewriteValueRISCV64_OpNeq32(v)
   444  	case OpNeq32F:
   445  		v.Op = OpRISCV64FNES
   446  		return true
   447  	case OpNeq64:
   448  		return rewriteValueRISCV64_OpNeq64(v)
   449  	case OpNeq64F:
   450  		v.Op = OpRISCV64FNED
   451  		return true
   452  	case OpNeq8:
   453  		return rewriteValueRISCV64_OpNeq8(v)
   454  	case OpNeqB:
   455  		return rewriteValueRISCV64_OpNeqB(v)
   456  	case OpNeqPtr:
   457  		return rewriteValueRISCV64_OpNeqPtr(v)
   458  	case OpNilCheck:
   459  		v.Op = OpRISCV64LoweredNilCheck
   460  		return true
   461  	case OpNot:
   462  		v.Op = OpRISCV64SEQZ
   463  		return true
   464  	case OpOffPtr:
   465  		return rewriteValueRISCV64_OpOffPtr(v)
   466  	case OpOr16:
   467  		v.Op = OpRISCV64OR
   468  		return true
   469  	case OpOr32:
   470  		v.Op = OpRISCV64OR
   471  		return true
   472  	case OpOr64:
   473  		v.Op = OpRISCV64OR
   474  		return true
   475  	case OpOr8:
   476  		v.Op = OpRISCV64OR
   477  		return true
   478  	case OpOrB:
   479  		v.Op = OpRISCV64OR
   480  		return true
   481  	case OpPanicBounds:
   482  		return rewriteValueRISCV64_OpPanicBounds(v)
   483  	case OpPubBarrier:
   484  		v.Op = OpRISCV64LoweredPubBarrier
   485  		return true
   486  	case OpRISCV64ADD:
   487  		return rewriteValueRISCV64_OpRISCV64ADD(v)
   488  	case OpRISCV64ADDI:
   489  		return rewriteValueRISCV64_OpRISCV64ADDI(v)
   490  	case OpRISCV64AND:
   491  		return rewriteValueRISCV64_OpRISCV64AND(v)
   492  	case OpRISCV64ANDI:
   493  		return rewriteValueRISCV64_OpRISCV64ANDI(v)
   494  	case OpRISCV64FADDD:
   495  		return rewriteValueRISCV64_OpRISCV64FADDD(v)
   496  	case OpRISCV64FADDS:
   497  		return rewriteValueRISCV64_OpRISCV64FADDS(v)
   498  	case OpRISCV64FMADDD:
   499  		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
   500  	case OpRISCV64FMADDS:
   501  		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
   502  	case OpRISCV64FMSUBD:
   503  		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
   504  	case OpRISCV64FMSUBS:
   505  		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
   506  	case OpRISCV64FNMADDD:
   507  		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
   508  	case OpRISCV64FNMADDS:
   509  		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
   510  	case OpRISCV64FNMSUBD:
   511  		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
   512  	case OpRISCV64FNMSUBS:
   513  		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
   514  	case OpRISCV64FSUBD:
   515  		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
   516  	case OpRISCV64FSUBS:
   517  		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
   518  	case OpRISCV64MOVBUload:
   519  		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
   520  	case OpRISCV64MOVBUreg:
   521  		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
   522  	case OpRISCV64MOVBload:
   523  		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
   524  	case OpRISCV64MOVBreg:
   525  		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
   526  	case OpRISCV64MOVBstore:
   527  		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
   528  	case OpRISCV64MOVBstorezero:
   529  		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
   530  	case OpRISCV64MOVDload:
   531  		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
   532  	case OpRISCV64MOVDnop:
   533  		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
   534  	case OpRISCV64MOVDreg:
   535  		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
   536  	case OpRISCV64MOVDstore:
   537  		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
   538  	case OpRISCV64MOVDstorezero:
   539  		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
   540  	case OpRISCV64MOVHUload:
   541  		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
   542  	case OpRISCV64MOVHUreg:
   543  		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
   544  	case OpRISCV64MOVHload:
   545  		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
   546  	case OpRISCV64MOVHreg:
   547  		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
   548  	case OpRISCV64MOVHstore:
   549  		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
   550  	case OpRISCV64MOVHstorezero:
   551  		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
   552  	case OpRISCV64MOVWUload:
   553  		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
   554  	case OpRISCV64MOVWUreg:
   555  		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
   556  	case OpRISCV64MOVWload:
   557  		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
   558  	case OpRISCV64MOVWreg:
   559  		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
   560  	case OpRISCV64MOVWstore:
   561  		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
   562  	case OpRISCV64MOVWstorezero:
   563  		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
   564  	case OpRISCV64NEG:
   565  		return rewriteValueRISCV64_OpRISCV64NEG(v)
   566  	case OpRISCV64NEGW:
   567  		return rewriteValueRISCV64_OpRISCV64NEGW(v)
   568  	case OpRISCV64OR:
   569  		return rewriteValueRISCV64_OpRISCV64OR(v)
   570  	case OpRISCV64ORI:
   571  		return rewriteValueRISCV64_OpRISCV64ORI(v)
   572  	case OpRISCV64ROL:
   573  		return rewriteValueRISCV64_OpRISCV64ROL(v)
   574  	case OpRISCV64ROLW:
   575  		return rewriteValueRISCV64_OpRISCV64ROLW(v)
   576  	case OpRISCV64ROR:
   577  		return rewriteValueRISCV64_OpRISCV64ROR(v)
   578  	case OpRISCV64RORW:
   579  		return rewriteValueRISCV64_OpRISCV64RORW(v)
   580  	case OpRISCV64SEQZ:
   581  		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
   582  	case OpRISCV64SLL:
   583  		return rewriteValueRISCV64_OpRISCV64SLL(v)
   584  	case OpRISCV64SLLI:
   585  		return rewriteValueRISCV64_OpRISCV64SLLI(v)
   586  	case OpRISCV64SLLW:
   587  		return rewriteValueRISCV64_OpRISCV64SLLW(v)
   588  	case OpRISCV64SLT:
   589  		return rewriteValueRISCV64_OpRISCV64SLT(v)
   590  	case OpRISCV64SLTI:
   591  		return rewriteValueRISCV64_OpRISCV64SLTI(v)
   592  	case OpRISCV64SLTIU:
   593  		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
   594  	case OpRISCV64SLTU:
   595  		return rewriteValueRISCV64_OpRISCV64SLTU(v)
   596  	case OpRISCV64SNEZ:
   597  		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
   598  	case OpRISCV64SRA:
   599  		return rewriteValueRISCV64_OpRISCV64SRA(v)
   600  	case OpRISCV64SRAI:
   601  		return rewriteValueRISCV64_OpRISCV64SRAI(v)
   602  	case OpRISCV64SRAW:
   603  		return rewriteValueRISCV64_OpRISCV64SRAW(v)
   604  	case OpRISCV64SRL:
   605  		return rewriteValueRISCV64_OpRISCV64SRL(v)
   606  	case OpRISCV64SRLI:
   607  		return rewriteValueRISCV64_OpRISCV64SRLI(v)
   608  	case OpRISCV64SRLW:
   609  		return rewriteValueRISCV64_OpRISCV64SRLW(v)
   610  	case OpRISCV64SUB:
   611  		return rewriteValueRISCV64_OpRISCV64SUB(v)
   612  	case OpRISCV64SUBW:
   613  		return rewriteValueRISCV64_OpRISCV64SUBW(v)
   614  	case OpRISCV64XOR:
   615  		return rewriteValueRISCV64_OpRISCV64XOR(v)
   616  	case OpRotateLeft16:
   617  		return rewriteValueRISCV64_OpRotateLeft16(v)
   618  	case OpRotateLeft32:
   619  		v.Op = OpRISCV64ROLW
   620  		return true
   621  	case OpRotateLeft64:
   622  		v.Op = OpRISCV64ROL
   623  		return true
   624  	case OpRotateLeft8:
   625  		return rewriteValueRISCV64_OpRotateLeft8(v)
   626  	case OpRound32F:
   627  		v.Op = OpRISCV64LoweredRound32F
   628  		return true
   629  	case OpRound64F:
   630  		v.Op = OpRISCV64LoweredRound64F
   631  		return true
   632  	case OpRsh16Ux16:
   633  		return rewriteValueRISCV64_OpRsh16Ux16(v)
   634  	case OpRsh16Ux32:
   635  		return rewriteValueRISCV64_OpRsh16Ux32(v)
   636  	case OpRsh16Ux64:
   637  		return rewriteValueRISCV64_OpRsh16Ux64(v)
   638  	case OpRsh16Ux8:
   639  		return rewriteValueRISCV64_OpRsh16Ux8(v)
   640  	case OpRsh16x16:
   641  		return rewriteValueRISCV64_OpRsh16x16(v)
   642  	case OpRsh16x32:
   643  		return rewriteValueRISCV64_OpRsh16x32(v)
   644  	case OpRsh16x64:
   645  		return rewriteValueRISCV64_OpRsh16x64(v)
   646  	case OpRsh16x8:
   647  		return rewriteValueRISCV64_OpRsh16x8(v)
   648  	case OpRsh32Ux16:
   649  		return rewriteValueRISCV64_OpRsh32Ux16(v)
   650  	case OpRsh32Ux32:
   651  		return rewriteValueRISCV64_OpRsh32Ux32(v)
   652  	case OpRsh32Ux64:
   653  		return rewriteValueRISCV64_OpRsh32Ux64(v)
   654  	case OpRsh32Ux8:
   655  		return rewriteValueRISCV64_OpRsh32Ux8(v)
   656  	case OpRsh32x16:
   657  		return rewriteValueRISCV64_OpRsh32x16(v)
   658  	case OpRsh32x32:
   659  		return rewriteValueRISCV64_OpRsh32x32(v)
   660  	case OpRsh32x64:
   661  		return rewriteValueRISCV64_OpRsh32x64(v)
   662  	case OpRsh32x8:
   663  		return rewriteValueRISCV64_OpRsh32x8(v)
   664  	case OpRsh64Ux16:
   665  		return rewriteValueRISCV64_OpRsh64Ux16(v)
   666  	case OpRsh64Ux32:
   667  		return rewriteValueRISCV64_OpRsh64Ux32(v)
   668  	case OpRsh64Ux64:
   669  		return rewriteValueRISCV64_OpRsh64Ux64(v)
   670  	case OpRsh64Ux8:
   671  		return rewriteValueRISCV64_OpRsh64Ux8(v)
   672  	case OpRsh64x16:
   673  		return rewriteValueRISCV64_OpRsh64x16(v)
   674  	case OpRsh64x32:
   675  		return rewriteValueRISCV64_OpRsh64x32(v)
   676  	case OpRsh64x64:
   677  		return rewriteValueRISCV64_OpRsh64x64(v)
   678  	case OpRsh64x8:
   679  		return rewriteValueRISCV64_OpRsh64x8(v)
   680  	case OpRsh8Ux16:
   681  		return rewriteValueRISCV64_OpRsh8Ux16(v)
   682  	case OpRsh8Ux32:
   683  		return rewriteValueRISCV64_OpRsh8Ux32(v)
   684  	case OpRsh8Ux64:
   685  		return rewriteValueRISCV64_OpRsh8Ux64(v)
   686  	case OpRsh8Ux8:
   687  		return rewriteValueRISCV64_OpRsh8Ux8(v)
   688  	case OpRsh8x16:
   689  		return rewriteValueRISCV64_OpRsh8x16(v)
   690  	case OpRsh8x32:
   691  		return rewriteValueRISCV64_OpRsh8x32(v)
   692  	case OpRsh8x64:
   693  		return rewriteValueRISCV64_OpRsh8x64(v)
   694  	case OpRsh8x8:
   695  		return rewriteValueRISCV64_OpRsh8x8(v)
   696  	case OpSelect0:
   697  		return rewriteValueRISCV64_OpSelect0(v)
   698  	case OpSelect1:
   699  		return rewriteValueRISCV64_OpSelect1(v)
   700  	case OpSignExt16to32:
   701  		v.Op = OpRISCV64MOVHreg
   702  		return true
   703  	case OpSignExt16to64:
   704  		v.Op = OpRISCV64MOVHreg
   705  		return true
   706  	case OpSignExt32to64:
   707  		v.Op = OpRISCV64MOVWreg
   708  		return true
   709  	case OpSignExt8to16:
   710  		v.Op = OpRISCV64MOVBreg
   711  		return true
   712  	case OpSignExt8to32:
   713  		v.Op = OpRISCV64MOVBreg
   714  		return true
   715  	case OpSignExt8to64:
   716  		v.Op = OpRISCV64MOVBreg
   717  		return true
   718  	case OpSlicemask:
   719  		return rewriteValueRISCV64_OpSlicemask(v)
   720  	case OpSqrt:
   721  		v.Op = OpRISCV64FSQRTD
   722  		return true
   723  	case OpSqrt32:
   724  		v.Op = OpRISCV64FSQRTS
   725  		return true
   726  	case OpStaticCall:
   727  		v.Op = OpRISCV64CALLstatic
   728  		return true
   729  	case OpStore:
   730  		return rewriteValueRISCV64_OpStore(v)
   731  	case OpSub16:
   732  		v.Op = OpRISCV64SUB
   733  		return true
   734  	case OpSub32:
   735  		v.Op = OpRISCV64SUB
   736  		return true
   737  	case OpSub32F:
   738  		v.Op = OpRISCV64FSUBS
   739  		return true
   740  	case OpSub64:
   741  		v.Op = OpRISCV64SUB
   742  		return true
   743  	case OpSub64F:
   744  		v.Op = OpRISCV64FSUBD
   745  		return true
   746  	case OpSub8:
   747  		v.Op = OpRISCV64SUB
   748  		return true
   749  	case OpSubPtr:
   750  		v.Op = OpRISCV64SUB
   751  		return true
   752  	case OpTailCall:
   753  		v.Op = OpRISCV64CALLtail
   754  		return true
   755  	case OpTrunc16to8:
   756  		v.Op = OpCopy
   757  		return true
   758  	case OpTrunc32to16:
   759  		v.Op = OpCopy
   760  		return true
   761  	case OpTrunc32to8:
   762  		v.Op = OpCopy
   763  		return true
   764  	case OpTrunc64to16:
   765  		v.Op = OpCopy
   766  		return true
   767  	case OpTrunc64to32:
   768  		v.Op = OpCopy
   769  		return true
   770  	case OpTrunc64to8:
   771  		v.Op = OpCopy
   772  		return true
   773  	case OpWB:
   774  		v.Op = OpRISCV64LoweredWB
   775  		return true
   776  	case OpXor16:
   777  		v.Op = OpRISCV64XOR
   778  		return true
   779  	case OpXor32:
   780  		v.Op = OpRISCV64XOR
   781  		return true
   782  	case OpXor64:
   783  		v.Op = OpRISCV64XOR
   784  		return true
   785  	case OpXor8:
   786  		v.Op = OpRISCV64XOR
   787  		return true
   788  	case OpZero:
   789  		return rewriteValueRISCV64_OpZero(v)
   790  	case OpZeroExt16to32:
   791  		v.Op = OpRISCV64MOVHUreg
   792  		return true
   793  	case OpZeroExt16to64:
   794  		v.Op = OpRISCV64MOVHUreg
   795  		return true
   796  	case OpZeroExt32to64:
   797  		v.Op = OpRISCV64MOVWUreg
   798  		return true
   799  	case OpZeroExt8to16:
   800  		v.Op = OpRISCV64MOVBUreg
   801  		return true
   802  	case OpZeroExt8to32:
   803  		v.Op = OpRISCV64MOVBUreg
   804  		return true
   805  	case OpZeroExt8to64:
   806  		v.Op = OpRISCV64MOVBUreg
   807  		return true
   808  	}
        // No rule matched: leave v unchanged for a later pass.
   809  	return false
   810  }
        // rewriteValueRISCV64_OpAddr lowers a symbolic address to a MOVaddr
        // with a zero offset, preserving the symbol in Aux. Always rewrites.
   811  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   812  	v_0 := v.Args[0]
   813  	// match: (Addr {sym} base)
   814  	// result: (MOVaddr {sym} [0] base)
   815  	for {
   816  		sym := auxToSym(v.Aux)
   817  		base := v_0
   818  		v.reset(OpRISCV64MOVaddr)
   819  		v.AuxInt = int32ToAuxInt(0)
   820  		v.Aux = symToAux(sym)
   821  		v.AddArg(base)
   822  		return true
   823  	}
   824  }
        // rewriteValueRISCV64_OpAtomicAnd8 lowers an 8-bit atomic AND onto the
        // 32-bit atomic AND primitive: the pointer is aligned down to a 4-byte
        // boundary (ANDI [^3]), and the byte operand is widened to a 32-bit
        // mask positioned at the byte's offset within the word (shift amount
        // = low two pointer bits * 8, via SLLI [3] of ANDI [3]); bits outside
        // the target byte are set to 1 (XORI [0xff] then NOT) so they are
        // preserved by the AND. Always rewrites.
   825  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   826  	v_2 := v.Args[2]
   827  	v_1 := v.Args[1]
   828  	v_0 := v.Args[0]
   829  	b := v.Block
   830  	typ := &b.Func.Config.Types
   831  	// match: (AtomicAnd8 ptr val mem)
   832  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   833  	for {
   834  		ptr := v_0
   835  		val := v_1
   836  		mem := v_2
   837  		v.reset(OpRISCV64LoweredAtomicAnd32)
   838  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   839  		v0.AuxInt = int64ToAuxInt(^3)
   840  		v0.AddArg(ptr)
   841  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   842  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   843  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   844  		v3.AuxInt = int64ToAuxInt(0xff)
   845  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   846  		v4.AddArg(val)
   847  		v3.AddArg(v4)
   848  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   849  		v5.AuxInt = int64ToAuxInt(3)
   850  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   851  		v6.AuxInt = int64ToAuxInt(3)
   852  		v6.AddArg(ptr)
   853  		v5.AddArg(v6)
   854  		v2.AddArg2(v3, v5)
   855  		v1.AddArg(v2)
   856  		v.AddArg3(v0, v1, mem)
   857  		return true
   858  	}
   859  }
        // rewriteValueRISCV64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to
        // LoweredAtomicCas32, sign-extending the expected old value to 64 bits
        // first (the comparison is performed on sign-extended register
        // contents). Always rewrites.
   860  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   861  	v_3 := v.Args[3]
   862  	v_2 := v.Args[2]
   863  	v_1 := v.Args[1]
   864  	v_0 := v.Args[0]
   865  	b := v.Block
   866  	typ := &b.Func.Config.Types
   867  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   868  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   869  	for {
   870  		ptr := v_0
   871  		old := v_1
   872  		new := v_2
   873  		mem := v_3
   874  		v.reset(OpRISCV64LoweredAtomicCas32)
   875  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   876  		v0.AddArg(old)
   877  		v.AddArg4(ptr, v0, new, mem)
   878  		return true
   879  	}
   880  }
        // rewriteValueRISCV64_OpAtomicOr8 lowers an 8-bit atomic OR onto the
        // 32-bit atomic OR primitive: the pointer is aligned down to a 4-byte
        // boundary (ANDI [^3]) and the zero-extended byte is shifted into its
        // position within the word (shift amount = low two pointer bits * 8).
        // Unlike AtomicAnd8 no mask inversion is needed, since OR with zero
        // bits preserves the other bytes. Always rewrites.
   881  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   882  	v_2 := v.Args[2]
   883  	v_1 := v.Args[1]
   884  	v_0 := v.Args[0]
   885  	b := v.Block
   886  	typ := &b.Func.Config.Types
   887  	// match: (AtomicOr8 ptr val mem)
   888  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   889  	for {
   890  		ptr := v_0
   891  		val := v_1
   892  		mem := v_2
   893  		v.reset(OpRISCV64LoweredAtomicOr32)
   894  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   895  		v0.AuxInt = int64ToAuxInt(^3)
   896  		v0.AddArg(ptr)
   897  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   898  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   899  		v2.AddArg(val)
   900  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   901  		v3.AuxInt = int64ToAuxInt(3)
   902  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   903  		v4.AuxInt = int64ToAuxInt(3)
   904  		v4.AddArg(ptr)
   905  		v3.AddArg(v4)
   906  		v1.AddArg2(v2, v3)
   907  		v.AddArg3(v0, v1, mem)
   908  		return true
   909  	}
   910  }
        // rewriteValueRISCV64_OpAvg64u lowers an unsigned 64-bit average
        // without overflow: (x>>1) + (y>>1) + (x&y&1). The final ANDI [1] term
        // restores the carry lost when both low bits are set. Always rewrites.
   911  func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
   912  	v_1 := v.Args[1]
   913  	v_0 := v.Args[0]
   914  	b := v.Block
   915  	// match: (Avg64u <t> x y)
   916  	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
   917  	for {
   918  		t := v.Type
   919  		x := v_0
   920  		y := v_1
   921  		v.reset(OpRISCV64ADD)
   922  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
   923  		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   924  		v1.AuxInt = int64ToAuxInt(1)
   925  		v1.AddArg(x)
   926  		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   927  		v2.AuxInt = int64ToAuxInt(1)
   928  		v2.AddArg(y)
   929  		v0.AddArg2(v1, v2)
   930  		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
   931  		v3.AuxInt = int64ToAuxInt(1)
   932  		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
   933  		v4.AddArg2(x, y)
   934  		v3.AddArg(v4)
   935  		v.AddArg2(v0, v3)
   936  		return true
   937  	}
   938  }
        // rewriteValueRISCV64_OpBitLen16 reduces a 16-bit bit-length to the
        // 64-bit form by zero-extending the operand (zero extension does not
        // change the bit length). Always rewrites.
   939  func rewriteValueRISCV64_OpBitLen16(v *Value) bool {
   940  	v_0 := v.Args[0]
   941  	b := v.Block
   942  	typ := &b.Func.Config.Types
   943  	// match: (BitLen16 x)
   944  	// result: (BitLen64 (ZeroExt16to64 x))
   945  	for {
   946  		x := v_0
   947  		v.reset(OpBitLen64)
   948  		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
   949  		v0.AddArg(x)
   950  		v.AddArg(v0)
   951  		return true
   952  	}
   953  }
        // rewriteValueRISCV64_OpBitLen32 lowers a 32-bit bit-length to
        // 32 minus the count of leading zeros (CLZW). Always rewrites.
   954  func rewriteValueRISCV64_OpBitLen32(v *Value) bool {
   955  	v_0 := v.Args[0]
   956  	b := v.Block
   957  	typ := &b.Func.Config.Types
   958  	// match: (BitLen32 <t> x)
   959  	// result: (SUB (MOVDconst [32]) (CLZW <t> x))
   960  	for {
   961  		t := v.Type
   962  		x := v_0
   963  		v.reset(OpRISCV64SUB)
   964  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   965  		v0.AuxInt = int64ToAuxInt(32)
   966  		v1 := b.NewValue0(v.Pos, OpRISCV64CLZW, t)
   967  		v1.AddArg(x)
   968  		v.AddArg2(v0, v1)
   969  		return true
   970  	}
   971  }
// rewriteValueRISCV64_OpBitLen64 lowers BitLen64 to 64 - CLZ(x),
// using the Zbb count-leading-zeros instruction.
func rewriteValueRISCV64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 <t> x)
	// result: (SUB (MOVDconst [64]) (CLZ <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpRISCV64CLZ, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen8 lowers BitLen8 by zero-extending the
// operand to 64 bits and deferring to the BitLen64 rule.
func rewriteValueRISCV64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst16 lowers a 16-bit constant to a
// sign-extended 64-bit MOVDconst.
func rewriteValueRISCV64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32 lowers a 32-bit constant to a
// sign-extended 64-bit MOVDconst.
func rewriteValueRISCV64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32F materializes a float32 constant by
// loading its IEEE-754 bit pattern into an integer register and moving
// it to a float register with FMVSX.
func rewriteValueRISCV64_OpConst32F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const32F [val])
	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpRISCV64FMVSX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst64 lowers a 64-bit constant to MOVDconst.
func rewriteValueRISCV64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst64F materializes a float64 constant by
// loading its IEEE-754 bit pattern into an integer register and moving
// it to a float register with FMVDX.
func rewriteValueRISCV64_OpConst64F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const64F [val])
	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpRISCV64FMVDX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst8 lowers an 8-bit constant to a
// sign-extended 64-bit MOVDconst.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConstBool lowers a boolean constant to a
// MOVDconst of 0 or 1 (via b2i).
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil lowers the nil-pointer constant to
// MOVDconst [0].
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpCtz16 lowers Ctz16 to CTZW, first ORing in bit
// 16 as a sentinel so a zero input yields 16 rather than 32.
func rewriteValueRISCV64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<16] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpCtz8 lowers Ctz8 to CTZW, first ORing in bit 8
// as a sentinel so a zero input yields 8 rather than 32.
func rewriteValueRISCV64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<8] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers signed 16-bit division to DIVW on
// sign-extended 32-bit operands. It only matches when the bool auxint
// is false (presumably the no-fixup-needed case — the true case is
// handled elsewhere).
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers unsigned 16-bit division to DIVUW
// on zero-extended 32-bit operands.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers signed 32-bit division to DIVW.
// It only matches when the bool auxint is false (presumably the
// no-fixup-needed case — the true case is handled elsewhere).
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers signed 64-bit division to DIV.
// It only matches when the bool auxint is false (presumably the
// no-fixup-needed case — the true case is handled elsewhere).
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers signed 8-bit division to DIVW on
// sign-extended 32-bit operands.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers unsigned 8-bit division to DIVUW
// on zero-extended 32-bit operands.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers Eq16 to SEQZ of the difference of
// the zero-extended operands: x == y iff (x64 - y64) == 0.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers Eq32 to SEQZ of the difference of
// the operands extended to 64 bits — sign-extended when the operand
// type is signed, zero-extended otherwise. Each rule tries both
// argument orders (the inner _i0 loop swaps v_0/v_1) since Eq is
// commutative.
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpEq64 lowers Eq64 to SEQZ (SUB x y):
// x == y iff x - y == 0.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers Eq8 to SEQZ of the difference of the
// zero-extended operands: x == y iff (x64 - y64) == 0.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers boolean equality to
// SEQZ (SUB x y) over typ.Bool.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers pointer equality to
// SEQZ (SUB x y) over typ.Uintptr.
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers the signed 32-bit high-multiply
// to a full 64-bit MUL of the sign-extended operands followed by an
// arithmetic shift right by 32 to extract the upper half.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers the unsigned 32-bit
// high-multiply to a full 64-bit MUL of the zero-extended operands
// followed by a logical shift right by 32 to extract the upper half.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 lowers x <= y as !(y < x), deferring to
// the Less16 rule.
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U lowers unsigned x <= y as !(y < x),
// deferring to the Less16U rule.
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 lowers x <= y as !(y < x), deferring to
// the Less32 rule.
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U lowers unsigned x <= y as !(y < x),
// deferring to the Less32U rule.
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 lowers x <= y as !(y < x), deferring to
// the Less64 rule.
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U lowers unsigned x <= y as !(y < x),
// deferring to the Less64U rule.
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 lowers x <= y as !(y < x), deferring to
// the Less8 rule.
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U lowers unsigned x <= y as !(y < x),
// deferring to the Less8U rule.
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers signed 16-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers unsigned 16-bit less-than to
// SLTU on zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers signed 32-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers unsigned 32-bit less-than to
// SLTU on zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers signed 8-bit less-than to SLT on
// sign-extended 64-bit operands.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers unsigned 8-bit less-than to SLTU
// on zero-extended 64-bit operands.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad lowers a generic Load to the
// RISCV64 load instruction matching the loaded type: byte/half/word
// loads (signed or unsigned variants by the type's signedness), MOVDload
// for 64-bit ints and pointers, and FMOVWload/FMOVDload for floats.
// The rules are tried in order; the first whose type condition holds wins.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers LocalAddr to MOVaddr. When the
// addressed type contains pointers, the base is wrapped in SPanchored
// to keep the memory ordering dependency; otherwise the memory argument
// is dropped.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x16 lowers Lsh16x16. When the shift amount
// is not known to be bounded, the SLL result is ANDed with the negation
// of (shift < 64) — an all-ones mask for in-range shifts, zero
// otherwise — to give Go's shift semantics (result 0 for oversized
// shifts). When shiftIsBounded, a bare SLL suffices.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x32 lowers Lsh16x32; same masking scheme
// as Lsh16x16 but the shift amount is zero-extended from 32 bits.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x64 lowers Lsh16x64; same masking scheme
// as Lsh16x16 but the 64-bit shift amount needs no extension.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers Lsh16x8; same masking scheme as
// Lsh16x16 but the shift amount is zero-extended from 8 bits.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers Lsh32x16. When the shift amount
// is not known to be bounded, the SLL result is ANDed with the negation
// of (shift < 64) — an all-ones mask for in-range shifts, zero
// otherwise — to give Go's shift semantics (result 0 for oversized
// shifts). When shiftIsBounded, a bare SLL suffices.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers Lsh32x32 (32-bit left shift by a
// 32-bit amount). Unbounded shifts AND the SLL result with
// (Neg32 (SLTIU [64] (ZeroExt32to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers Lsh32x64 (32-bit left shift by a
// 64-bit amount; no zero-extension of y is needed). Unbounded shifts AND the
// SLL result with (Neg32 (SLTIU [64] y)): SLTIU is 1 iff y < 64, so negating
// it yields an all-ones mask for in-range shifts and zero for shifts >= 64.
// Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers Lsh32x8 (32-bit left shift by an
// 8-bit amount). Unbounded shifts AND the SLL result with
// (Neg32 (SLTIU [64] (ZeroExt8to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers Lsh64x16 (64-bit left shift by a
// 16-bit amount). Unbounded shifts AND the SLL result with
// (Neg64 (SLTIU [64] (ZeroExt16to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers Lsh64x32 (64-bit left shift by a
// 32-bit amount). Unbounded shifts AND the SLL result with
// (Neg64 (SLTIU [64] (ZeroExt32to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers Lsh64x64 (64-bit left shift by a
// 64-bit amount; no zero-extension of y is needed). Unbounded shifts AND the
// SLL result with (Neg64 (SLTIU [64] y)): SLTIU is 1 iff y < 64, so negating
// it yields an all-ones mask for in-range shifts and zero for shifts >= 64.
// Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers Lsh64x8 (64-bit left shift by an
// 8-bit amount). Unbounded shifts AND the SLL result with
// (Neg64 (SLTIU [64] (ZeroExt8to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers Lsh8x16 (8-bit left shift by a
// 16-bit amount). Unbounded shifts AND the SLL result with
// (Neg8 (SLTIU [64] (ZeroExt16to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers Lsh8x32 (8-bit left shift by a
// 32-bit amount). Unbounded shifts AND the SLL result with
// (Neg8 (SLTIU [64] (ZeroExt32to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers Lsh8x64 (8-bit left shift by a
// 64-bit amount; no zero-extension of y is needed). Unbounded shifts AND the
// SLL result with (Neg8 (SLTIU [64] y)): SLTIU is 1 iff y < 64, so negating
// it yields an all-ones mask for in-range shifts and zero for shifts >= 64.
// Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers Lsh8x8 (8-bit left shift by an
// 8-bit amount). Unbounded shifts AND the SLL result with
// (Neg8 (SLTIU [64] (ZeroExt8to64 y))): SLTIU is 1 iff the zero-extended
// amount is < 64, so negating it yields an all-ones mask for in-range shifts
// and zero for shifts >= 64. Bounded shifts (shiftIsBounded) become a bare SLL.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64 rewrites Max64 to the MAX instruction, but only
// when the build targets GORISCV64 >= 22 (where the instruction is assumed
// available — presumably via the Zbb extension; confirm against the rules
// file). Otherwise no rewrite is applied and the op is left as-is.
func rewriteValueRISCV64_OpMax64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAX x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAX)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64u rewrites Max64u (unsigned 64-bit max) to the
// MAXU instruction, but only when the build targets GORISCV64 >= 22.
// Otherwise no rewrite is applied and the op is left as-is.
func rewriteValueRISCV64_OpMax64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAXU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAXU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64 rewrites Min64 to the MIN instruction, but only
// when the build targets GORISCV64 >= 22. Otherwise no rewrite is applied and
// the op is left as-is.
func rewriteValueRISCV64_OpMin64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MIN x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MIN)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64u rewrites Min64u (unsigned 64-bit min) to the
// MINU instruction, but only when the build targets GORISCV64 >= 22.
// Otherwise no rewrite is applied and the op is left as-is.
func rewriteValueRISCV64_OpMin64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MINU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MINU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers Mod16 to REMW (32-bit signed remainder)
// on sign-extended operands. The rewrite applies only when the op's boolean
// AuxInt is false; otherwise the op is left unchanged.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers Mod16u to REMUW (32-bit unsigned
// remainder) on zero-extended operands. This rewrite is unconditional, so the
// function always returns true (the code after the loop is unreachable).
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers Mod32 to REMW (32-bit signed remainder).
// The rewrite applies only when the op's boolean AuxInt is false; otherwise
// the op is left unchanged.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers Mod64 to REM (64-bit signed remainder).
// The rewrite applies only when the op's boolean AuxInt is false; otherwise
// the op is left unchanged.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers Mod8 to REMW (32-bit signed remainder) on
// sign-extended operands. This rewrite is unconditional, so the function
// always returns true (the code after the loop is unreachable).
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers Mod8u to REMUW (32-bit unsigned
// remainder) on zero-extended operands. This rewrite is unconditional, so the
// function always returns true (the code after the loop is unreachable).
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
  2722  func rewriteValueRISCV64_OpMove(v *Value) bool {
  2723  	v_2 := v.Args[2]
  2724  	v_1 := v.Args[1]
  2725  	v_0 := v.Args[0]
  2726  	b := v.Block
  2727  	config := b.Func.Config
  2728  	typ := &b.Func.Config.Types
  2729  	// match: (Move [0] _ _ mem)
  2730  	// result: mem
  2731  	for {
  2732  		if auxIntToInt64(v.AuxInt) != 0 {
  2733  			break
  2734  		}
  2735  		mem := v_2
  2736  		v.copyOf(mem)
  2737  		return true
  2738  	}
  2739  	// match: (Move [1] dst src mem)
  2740  	// result: (MOVBstore dst (MOVBload src mem) mem)
  2741  	for {
  2742  		if auxIntToInt64(v.AuxInt) != 1 {
  2743  			break
  2744  		}
  2745  		dst := v_0
  2746  		src := v_1
  2747  		mem := v_2
  2748  		v.reset(OpRISCV64MOVBstore)
  2749  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2750  		v0.AddArg2(src, mem)
  2751  		v.AddArg3(dst, v0, mem)
  2752  		return true
  2753  	}
  2754  	// match: (Move [2] {t} dst src mem)
  2755  	// cond: t.Alignment()%2 == 0
  2756  	// result: (MOVHstore dst (MOVHload src mem) mem)
  2757  	for {
  2758  		if auxIntToInt64(v.AuxInt) != 2 {
  2759  			break
  2760  		}
  2761  		t := auxToType(v.Aux)
  2762  		dst := v_0
  2763  		src := v_1
  2764  		mem := v_2
  2765  		if !(t.Alignment()%2 == 0) {
  2766  			break
  2767  		}
  2768  		v.reset(OpRISCV64MOVHstore)
  2769  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2770  		v0.AddArg2(src, mem)
  2771  		v.AddArg3(dst, v0, mem)
  2772  		return true
  2773  	}
  2774  	// match: (Move [2] dst src mem)
  2775  	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  2776  	for {
  2777  		if auxIntToInt64(v.AuxInt) != 2 {
  2778  			break
  2779  		}
  2780  		dst := v_0
  2781  		src := v_1
  2782  		mem := v_2
  2783  		v.reset(OpRISCV64MOVBstore)
  2784  		v.AuxInt = int32ToAuxInt(1)
  2785  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2786  		v0.AuxInt = int32ToAuxInt(1)
  2787  		v0.AddArg2(src, mem)
  2788  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2789  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2790  		v2.AddArg2(src, mem)
  2791  		v1.AddArg3(dst, v2, mem)
  2792  		v.AddArg3(dst, v0, v1)
  2793  		return true
  2794  	}
  2795  	// match: (Move [4] {t} dst src mem)
  2796  	// cond: t.Alignment()%4 == 0
  2797  	// result: (MOVWstore dst (MOVWload src mem) mem)
  2798  	for {
  2799  		if auxIntToInt64(v.AuxInt) != 4 {
  2800  			break
  2801  		}
  2802  		t := auxToType(v.Aux)
  2803  		dst := v_0
  2804  		src := v_1
  2805  		mem := v_2
  2806  		if !(t.Alignment()%4 == 0) {
  2807  			break
  2808  		}
  2809  		v.reset(OpRISCV64MOVWstore)
  2810  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2811  		v0.AddArg2(src, mem)
  2812  		v.AddArg3(dst, v0, mem)
  2813  		return true
  2814  	}
  2815  	// match: (Move [4] {t} dst src mem)
  2816  	// cond: t.Alignment()%2 == 0
  2817  	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  2818  	for {
  2819  		if auxIntToInt64(v.AuxInt) != 4 {
  2820  			break
  2821  		}
  2822  		t := auxToType(v.Aux)
  2823  		dst := v_0
  2824  		src := v_1
  2825  		mem := v_2
  2826  		if !(t.Alignment()%2 == 0) {
  2827  			break
  2828  		}
  2829  		v.reset(OpRISCV64MOVHstore)
  2830  		v.AuxInt = int32ToAuxInt(2)
  2831  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2832  		v0.AuxInt = int32ToAuxInt(2)
  2833  		v0.AddArg2(src, mem)
  2834  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2835  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2836  		v2.AddArg2(src, mem)
  2837  		v1.AddArg3(dst, v2, mem)
  2838  		v.AddArg3(dst, v0, v1)
  2839  		return true
  2840  	}
  2841  	// match: (Move [4] dst src mem)
  2842  	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  2843  	for {
  2844  		if auxIntToInt64(v.AuxInt) != 4 {
  2845  			break
  2846  		}
  2847  		dst := v_0
  2848  		src := v_1
  2849  		mem := v_2
  2850  		v.reset(OpRISCV64MOVBstore)
  2851  		v.AuxInt = int32ToAuxInt(3)
  2852  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2853  		v0.AuxInt = int32ToAuxInt(3)
  2854  		v0.AddArg2(src, mem)
  2855  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2856  		v1.AuxInt = int32ToAuxInt(2)
  2857  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2858  		v2.AuxInt = int32ToAuxInt(2)
  2859  		v2.AddArg2(src, mem)
  2860  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2861  		v3.AuxInt = int32ToAuxInt(1)
  2862  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2863  		v4.AuxInt = int32ToAuxInt(1)
  2864  		v4.AddArg2(src, mem)
  2865  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2866  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2867  		v6.AddArg2(src, mem)
  2868  		v5.AddArg3(dst, v6, mem)
  2869  		v3.AddArg3(dst, v4, v5)
  2870  		v1.AddArg3(dst, v2, v3)
  2871  		v.AddArg3(dst, v0, v1)
  2872  		return true
  2873  	}
  2874  	// match: (Move [8] {t} dst src mem)
  2875  	// cond: t.Alignment()%8 == 0
  2876  	// result: (MOVDstore dst (MOVDload src mem) mem)
  2877  	for {
  2878  		if auxIntToInt64(v.AuxInt) != 8 {
  2879  			break
  2880  		}
  2881  		t := auxToType(v.Aux)
  2882  		dst := v_0
  2883  		src := v_1
  2884  		mem := v_2
  2885  		if !(t.Alignment()%8 == 0) {
  2886  			break
  2887  		}
  2888  		v.reset(OpRISCV64MOVDstore)
  2889  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  2890  		v0.AddArg2(src, mem)
  2891  		v.AddArg3(dst, v0, mem)
  2892  		return true
  2893  	}
  2894  	// match: (Move [8] {t} dst src mem)
  2895  	// cond: t.Alignment()%4 == 0
  2896  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  2897  	for {
  2898  		if auxIntToInt64(v.AuxInt) != 8 {
  2899  			break
  2900  		}
  2901  		t := auxToType(v.Aux)
  2902  		dst := v_0
  2903  		src := v_1
  2904  		mem := v_2
  2905  		if !(t.Alignment()%4 == 0) {
  2906  			break
  2907  		}
  2908  		v.reset(OpRISCV64MOVWstore)
  2909  		v.AuxInt = int32ToAuxInt(4)
  2910  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2911  		v0.AuxInt = int32ToAuxInt(4)
  2912  		v0.AddArg2(src, mem)
  2913  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  2914  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  2915  		v2.AddArg2(src, mem)
  2916  		v1.AddArg3(dst, v2, mem)
  2917  		v.AddArg3(dst, v0, v1)
  2918  		return true
  2919  	}
  2920  	// match: (Move [8] {t} dst src mem)
  2921  	// cond: t.Alignment()%2 == 0
  2922  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  2923  	for {
  2924  		if auxIntToInt64(v.AuxInt) != 8 {
  2925  			break
  2926  		}
  2927  		t := auxToType(v.Aux)
  2928  		dst := v_0
  2929  		src := v_1
  2930  		mem := v_2
  2931  		if !(t.Alignment()%2 == 0) {
  2932  			break
  2933  		}
  2934  		v.reset(OpRISCV64MOVHstore)
  2935  		v.AuxInt = int32ToAuxInt(6)
  2936  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2937  		v0.AuxInt = int32ToAuxInt(6)
  2938  		v0.AddArg2(src, mem)
  2939  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2940  		v1.AuxInt = int32ToAuxInt(4)
  2941  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2942  		v2.AuxInt = int32ToAuxInt(4)
  2943  		v2.AddArg2(src, mem)
  2944  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2945  		v3.AuxInt = int32ToAuxInt(2)
  2946  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2947  		v4.AuxInt = int32ToAuxInt(2)
  2948  		v4.AddArg2(src, mem)
  2949  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  2950  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  2951  		v6.AddArg2(src, mem)
  2952  		v5.AddArg3(dst, v6, mem)
  2953  		v3.AddArg3(dst, v4, v5)
  2954  		v1.AddArg3(dst, v2, v3)
  2955  		v.AddArg3(dst, v0, v1)
  2956  		return true
  2957  	}
  2958  	// match: (Move [3] dst src mem)
  2959  	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
  2960  	for {
  2961  		if auxIntToInt64(v.AuxInt) != 3 {
  2962  			break
  2963  		}
  2964  		dst := v_0
  2965  		src := v_1
  2966  		mem := v_2
  2967  		v.reset(OpRISCV64MOVBstore)
  2968  		v.AuxInt = int32ToAuxInt(2)
  2969  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2970  		v0.AuxInt = int32ToAuxInt(2)
  2971  		v0.AddArg2(src, mem)
  2972  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2973  		v1.AuxInt = int32ToAuxInt(1)
  2974  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2975  		v2.AuxInt = int32ToAuxInt(1)
  2976  		v2.AddArg2(src, mem)
  2977  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  2978  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
  2979  		v4.AddArg2(src, mem)
  2980  		v3.AddArg3(dst, v4, mem)
  2981  		v1.AddArg3(dst, v2, v3)
  2982  		v.AddArg3(dst, v0, v1)
  2983  		return true
  2984  	}
  2985  	// match: (Move [6] {t} dst src mem)
  2986  	// cond: t.Alignment()%2 == 0
  2987  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  2988  	for {
  2989  		if auxIntToInt64(v.AuxInt) != 6 {
  2990  			break
  2991  		}
  2992  		t := auxToType(v.Aux)
  2993  		dst := v_0
  2994  		src := v_1
  2995  		mem := v_2
  2996  		if !(t.Alignment()%2 == 0) {
  2997  			break
  2998  		}
  2999  		v.reset(OpRISCV64MOVHstore)
  3000  		v.AuxInt = int32ToAuxInt(4)
  3001  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3002  		v0.AuxInt = int32ToAuxInt(4)
  3003  		v0.AddArg2(src, mem)
  3004  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3005  		v1.AuxInt = int32ToAuxInt(2)
  3006  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3007  		v2.AuxInt = int32ToAuxInt(2)
  3008  		v2.AddArg2(src, mem)
  3009  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  3010  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
  3011  		v4.AddArg2(src, mem)
  3012  		v3.AddArg3(dst, v4, mem)
  3013  		v1.AddArg3(dst, v2, v3)
  3014  		v.AddArg3(dst, v0, v1)
  3015  		return true
  3016  	}
  3017  	// match: (Move [12] {t} dst src mem)
  3018  	// cond: t.Alignment()%4 == 0
  3019  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  3020  	for {
  3021  		if auxIntToInt64(v.AuxInt) != 12 {
  3022  			break
  3023  		}
  3024  		t := auxToType(v.Aux)
  3025  		dst := v_0
  3026  		src := v_1
  3027  		mem := v_2
  3028  		if !(t.Alignment()%4 == 0) {
  3029  			break
  3030  		}
  3031  		v.reset(OpRISCV64MOVWstore)
  3032  		v.AuxInt = int32ToAuxInt(8)
  3033  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3034  		v0.AuxInt = int32ToAuxInt(8)
  3035  		v0.AddArg2(src, mem)
  3036  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3037  		v1.AuxInt = int32ToAuxInt(4)
  3038  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3039  		v2.AuxInt = int32ToAuxInt(4)
  3040  		v2.AddArg2(src, mem)
  3041  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  3042  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
  3043  		v4.AddArg2(src, mem)
  3044  		v3.AddArg3(dst, v4, mem)
  3045  		v1.AddArg3(dst, v2, v3)
  3046  		v.AddArg3(dst, v0, v1)
  3047  		return true
  3048  	}
  3049  	// match: (Move [16] {t} dst src mem)
  3050  	// cond: t.Alignment()%8 == 0
  3051  	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
  3052  	for {
  3053  		if auxIntToInt64(v.AuxInt) != 16 {
  3054  			break
  3055  		}
  3056  		t := auxToType(v.Aux)
  3057  		dst := v_0
  3058  		src := v_1
  3059  		mem := v_2
  3060  		if !(t.Alignment()%8 == 0) {
  3061  			break
  3062  		}
  3063  		v.reset(OpRISCV64MOVDstore)
  3064  		v.AuxInt = int32ToAuxInt(8)
  3065  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3066  		v0.AuxInt = int32ToAuxInt(8)
  3067  		v0.AddArg2(src, mem)
  3068  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3069  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3070  		v2.AddArg2(src, mem)
  3071  		v1.AddArg3(dst, v2, mem)
  3072  		v.AddArg3(dst, v0, v1)
  3073  		return true
  3074  	}
  3075  	// match: (Move [24] {t} dst src mem)
  3076  	// cond: t.Alignment()%8 == 0
  3077  	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
  3078  	for {
  3079  		if auxIntToInt64(v.AuxInt) != 24 {
  3080  			break
  3081  		}
  3082  		t := auxToType(v.Aux)
  3083  		dst := v_0
  3084  		src := v_1
  3085  		mem := v_2
  3086  		if !(t.Alignment()%8 == 0) {
  3087  			break
  3088  		}
  3089  		v.reset(OpRISCV64MOVDstore)
  3090  		v.AuxInt = int32ToAuxInt(16)
  3091  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3092  		v0.AuxInt = int32ToAuxInt(16)
  3093  		v0.AddArg2(src, mem)
  3094  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3095  		v1.AuxInt = int32ToAuxInt(8)
  3096  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3097  		v2.AuxInt = int32ToAuxInt(8)
  3098  		v2.AddArg2(src, mem)
  3099  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3100  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3101  		v4.AddArg2(src, mem)
  3102  		v3.AddArg3(dst, v4, mem)
  3103  		v1.AddArg3(dst, v2, v3)
  3104  		v.AddArg3(dst, v0, v1)
  3105  		return true
  3106  	}
  3107  	// match: (Move [32] {t} dst src mem)
  3108  	// cond: t.Alignment()%8 == 0
  3109  	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
  3110  	for {
  3111  		if auxIntToInt64(v.AuxInt) != 32 {
  3112  			break
  3113  		}
  3114  		t := auxToType(v.Aux)
  3115  		dst := v_0
  3116  		src := v_1
  3117  		mem := v_2
  3118  		if !(t.Alignment()%8 == 0) {
  3119  			break
  3120  		}
  3121  		v.reset(OpRISCV64MOVDstore)
  3122  		v.AuxInt = int32ToAuxInt(24)
  3123  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3124  		v0.AuxInt = int32ToAuxInt(24)
  3125  		v0.AddArg2(src, mem)
  3126  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3127  		v1.AuxInt = int32ToAuxInt(16)
  3128  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3129  		v2.AuxInt = int32ToAuxInt(16)
  3130  		v2.AddArg2(src, mem)
  3131  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3132  		v3.AuxInt = int32ToAuxInt(8)
  3133  		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3134  		v4.AuxInt = int32ToAuxInt(8)
  3135  		v4.AddArg2(src, mem)
  3136  		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  3137  		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
  3138  		v6.AddArg2(src, mem)
  3139  		v5.AddArg3(dst, v6, mem)
  3140  		v3.AddArg3(dst, v4, v5)
  3141  		v1.AddArg3(dst, v2, v3)
  3142  		v.AddArg3(dst, v0, v1)
  3143  		return true
  3144  	}
  3145  	// match: (Move [s] {t} dst src mem)
  3146  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)
  3147  	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
  3148  	for {
  3149  		s := auxIntToInt64(v.AuxInt)
  3150  		t := auxToType(v.Aux)
  3151  		dst := v_0
  3152  		src := v_1
  3153  		mem := v_2
  3154  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && logLargeCopy(v, s)) {
  3155  			break
  3156  		}
  3157  		v.reset(OpRISCV64DUFFCOPY)
  3158  		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
  3159  		v.AddArg3(dst, src, mem)
  3160  		return true
  3161  	}
  3162  	// match: (Move [s] {t} dst src mem)
  3163  	// cond: (s <= 16 || logLargeCopy(v, s))
  3164  	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
  3165  	for {
  3166  		s := auxIntToInt64(v.AuxInt)
  3167  		t := auxToType(v.Aux)
  3168  		dst := v_0
  3169  		src := v_1
  3170  		mem := v_2
  3171  		if !(s <= 16 || logLargeCopy(v, s)) {
  3172  			break
  3173  		}
  3174  		v.reset(OpRISCV64LoweredMove)
  3175  		v.AuxInt = int64ToAuxInt(t.Alignment())
  3176  		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
  3177  		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  3178  		v0.AddArg(src)
  3179  		v.AddArg4(dst, src, v0, mem)
  3180  		return true
  3181  	}
  3182  	return false
  3183  }
// rewriteValueRISCV64_OpMul16 lowers the generic Mul16 op: both operands are
// sign-extended to 32 bits and multiplied with MULW. Always fires.
func rewriteValueRISCV64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMul8 lowers the generic Mul8 op: both operands are
// sign-extended to 32 bits and multiplied with MULW. Always fires.
func rewriteValueRISCV64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpNeq16 lowers Neq16 x y to (Not (Eq16 x y)); the
// resulting generic ops are lowered further by their own rules. Always fires.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers Neq32 x y to (Not (Eq32 x y)); the
// resulting generic ops are lowered further by their own rules. Always fires.
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers Neq64 x y to (Not (Eq64 x y)); the
// resulting generic ops are lowered further by their own rules. Always fires.
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers Neq8 x y to (Not (Eq8 x y)); the
// resulting generic ops are lowered further by their own rules. Always fires.
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers NeqB x y to (SNEZ (SUB x y)): two
// booleans differ exactly when their difference is nonzero. Always fires.
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers NeqPtr x y to (Not (EqPtr x y)); the
// resulting generic ops are lowered further by their own rules. Always fires.
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers OffPtr. Rules are tried in order:
// an offset from SP becomes a MOVaddr, any other 32-bit offset becomes an
// ADDI, and the fallback materializes the offset with MOVDconst + ADD.
// The third rule is unconditional, so this function always rewrites.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		// MOVaddr carries a 32-bit aux offset; is32Bit(off) makes the
		// int32 conversion lossless.
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPanicBounds lowers PanicBounds to one of the three
// LoweredPanicBounds variants (A/B/C), selected by boundsABI(kind), which
// determines the register ABI used to pass x and y to the panic helper.
// Returns false only if boundsABI(kind) is outside 0..2.
func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADD optimizes ADD:
//   - ADD with a 32-bit non-pointer constant folds to the immediate form ADDI;
//   - ADD of a left-shift by 1, 2, or 3 becomes SH1ADD/SH2ADD/SH3ADD when
//     buildcfg.GORISCV64 >= 22 (profile providing the shift-add instructions).
//
// ADD is commutative, so each rule's inner `_i0` loop tries both argument
// orders, swapping v_0/v_1 on the second iteration.
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD (SLLI [1] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH1ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH1ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [2] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH2ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH2ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [3] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH3ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH3ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI optimizes ADDI (add-immediate):
// folds the immediate into a MOVaddr offset, eliminates ADDI [0] as an
// identity, constant-folds ADDI of MOVDconst, and merges chained ADDIs.
// All folds are guarded by is32Bit on the combined offset to keep the
// result encodable.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND folds AND with a 32-bit constant operand
// into the immediate form ANDI. AND is commutative, so the `_i0` loop tries
// both argument orders.
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI optimizes ANDI (and-immediate):
// ANDI [0] is the constant 0, ANDI [-1] (all bits set) is the identity,
// ANDI of a constant folds, and chained ANDIs merge by ANDing their masks.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double-precision add of a
// multiply, a + x*y, into FMADDD x y a, but only when useFMA permits fusion
// for this value. FADDD is commutative, so both argument orders are tried.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add of a
// multiply, a + x*y, into FMADDS x y a, but only when useFMA permits fusion
// for this value. FADDS is commutative, so both argument orders are tried.
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FMADDD into FNMSUBD, and a
// negated addend turns it into FMSUBD. The Uses == 1 guard ensures the FNEGD
// is dead after the rewrite. The multiplicand pair commutes, hence the `_i0`
// swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FMADDS into FNMSUBS, and a
// negated addend turns it into FMSUBS. The Uses == 1 guard ensures the FNEGS
// is dead after the rewrite. The multiplicand pair commutes, hence the `_i0`
// swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FMSUBD into FNMADDD, and a
// negated subtrahend turns it back into FMADDD. The Uses == 1 guard ensures
// the FNEGD is dead after the rewrite. The multiplicand pair commutes, hence
// the `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FMSUBS into FNMADDS, and a
// negated subtrahend turns it back into FMADDS. The Uses == 1 guard ensures
// the FNEGS is dead after the rewrite. The multiplicand pair commutes, hence
// the `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FNMADDD into FMSUBD, and a
// negated addend turns it into FNMSUBD. The Uses == 1 guard ensures the
// FNEGD is dead after the rewrite. The multiplicand pair commutes, hence the
// `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FNMADDS into FMSUBS, and a
// negated addend turns it into FNMSUBS. The Uses == 1 guard ensures the
// FNEGS is dead after the rewrite. The multiplicand pair commutes, hence the
// `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FNMSUBD back into FMADDD,
// and a negated subtrahend turns it into FNMADDD. The Uses == 1 guard
// ensures the FNEGD is dead after the rewrite. The multiplicand pair
// commutes, hence the `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FNMSUBS back into FMADDS,
// and a negated subtrahend turns it into FNMADDS. The Uses == 1 guard
// ensures the FNEGS is dead after the rewrite. The multiplicand pair
// commutes, hence the `_i0` swap loop in the first rule.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD fuses a double-precision subtract with
// a multiply when useFMA permits: a - x*y becomes FNMSUBD x y a, and
// x*y - a becomes FMSUBD x y a. FSUBD is not commutative, so each side is
// matched by its own rule rather than an `_i0` swap loop.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS fuses a single-precision subtract with
// a multiply when useFMA permits: a - x*y becomes FNMSUBS x y a, and
// x*y - a becomes FMSUBS x y a. FSUBS is not commutative, so each side is
// matched by its own rule rather than an `_i0` swap loop.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload folds address arithmetic into the
// unsigned byte load: a MOVaddr base merges its offset and symbol into the
// load (skipped for SB-relative addresses under dynamic linking), and an
// ADDI base folds its immediate into the load offset. Both folds require
// the combined offset to fit in 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		// is32Bit on the sum makes the int32(off2) conversion lossless here.
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUreg applies the MOVBUreg (zero-extend
// byte to 64 bits) rewrite rules from _gen/RISCV64.rules. The extension is
// elided when the operand provably already fits in an unsigned byte, folded
// into constants/masks, or converted into an unsigned load.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// The comparison pseudo-ops below (FLES..FNED, SEQZ, SNEZ, SLT, SLTU)
	// yield results already representable as an unsigned byte, so the
	// zero-extension is a no-op per these rules.
	// match: (MOVBUreg x:(FLES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SEQZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SEQZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SNEZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SNEZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// An AND with a non-negative mask that fits in a byte already produces
	// a zero-extended result, so the extension is redundant.
	// match: (MOVBUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// A negative mask followed by zero-extension is equivalent to masking
	// with only the mask's low 8 bits.
	// match: (MOVBUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint8(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg(x)
		return true
	}
	// Constant-fold the zero-extension.
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// Unsigned byte loads and atomic byte ops already zero-extend; keep
	// only a register move.
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// A second zero-extension of an already zero-extended value is redundant.
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// Fold the extension into the load itself when the load has no other
	// uses: replace signed byte load + zero-extend with an unsigned load,
	// emitted in the load's original block.
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBload applies the MOVBload (signed byte
// load) rewrite rules from _gen/RISCV64.rules: MOVaddr/ADDI address
// arithmetic is folded into the load's offset/symbol aux fields.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBreg applies the MOVBreg (sign-extend byte
// to 64 bits) rewrite rules from _gen/RISCV64.rules. The extension is elided
// when the operand already fits in a signed byte, constant-folded, or folded
// into the load producing the operand.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// A non-negative mask whose value fits in a signed byte leaves the
	// result already sign-extended (top bits zero, bit 7 clear).
	// match: (MOVBreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// Constant-fold the sign-extension.
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// Signed byte loads already sign-extend; keep only a register move.
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// A second sign-extension of an already sign-extended value is redundant.
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// Fold the extension into a single-use unsigned load by switching it
	// to a signed load, emitted in the load's original block.
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstore applies the MOVBstore (store byte)
// rewrite rules from _gen/RISCV64.rules: address arithmetic is folded into
// the store offset, a zero constant store becomes MOVBstorezero, and sign-
// or zero-extensions of the stored value are dropped (a byte store only
// writes the low 8 bits).
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2] // mem
	v_1 := v.Args[1] // value to store
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// Storing constant zero uses the dedicated zero-store op (stores the
	// zero register rather than materializing a constant).
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// The remaining rules strip MOV{B,H,W}reg / MOV{B,H,W}Ureg extensions
	// from the stored value: only the low byte reaches memory.
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstorezero applies the MOVBstorezero
// (store zero byte) rewrite rules from _gen/RISCV64.rules: MOVaddr/ADDI
// address arithmetic is folded into the store's offset/symbol aux fields.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDload applies the MOVDload (load
// doubleword) rewrite rules from _gen/RISCV64.rules: MOVaddr/ADDI address
// arithmetic is folded into the load's offset/symbol aux fields.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDnop applies the MOVDnop rewrite rule from
// _gen/RISCV64.rules: a no-op move of a constant collapses to the constant
// itself. It reports whether v was rewritten.
//
// NOTE(review): generated code — do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDreg applies the MOVDreg rewrite rule from
// _gen/RISCV64.rules: when the operand has exactly one use, the register
// move can be downgraded to a MOVDnop (eligible for later elimination).
// It reports whether v was rewritten.
//
// NOTE(review): generated code — do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstore applies the MOVDstore (store
// doubleword) rewrite rules from _gen/RISCV64.rules: address arithmetic is
// folded into the store offset, and a constant-zero store becomes
// MOVDstorezero. It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2] // mem
	v_1 := v.Args[1] // value to store
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// Storing constant zero uses the dedicated zero-store op.
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstorezero applies the MOVDstorezero
// (store zero doubleword) rewrite rules from _gen/RISCV64.rules:
// MOVaddr/ADDI address arithmetic is folded into the store's offset/symbol
// aux fields. It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUload applies the MOVHUload (load
// unsigned halfword) rewrite rules from _gen/RISCV64.rules: MOVaddr/ADDI
// address arithmetic is folded into the load's offset/symbol aux fields.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUreg applies the MOVHUreg (zero-extend
// halfword to 64 bits) rewrite rules from _gen/RISCV64.rules. The extension
// is elided when the operand already fits in an unsigned halfword, folded
// into constants/masks, or folded into the producing load.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// A non-negative mask that fits in 16 bits already leaves the result
	// zero-extended.
	// match: (MOVHUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// A negative mask followed by zero-extension is equivalent to masking
	// with only the mask's low 16 bits.
	// match: (MOVHUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint16(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg(x)
		return true
	}
	// Constant-fold the zero-extension.
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// Unsigned loads and narrower zero-extensions already produce a value
	// that fits in 16 unsigned bits; keep only a register move.
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// Fold the extension into a single-use signed load by switching it to
	// an unsigned load, emitted in the load's original block.
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHload applies the MOVHload (signed
// halfword load) rewrite rules from _gen/RISCV64.rules: MOVaddr/ADDI
// address arithmetic is folded into the load's offset/symbol aux fields.
// It reports whether v was rewritten.
//
// NOTE(review): generated code — rule order is significant; do not hand-edit logic.
func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
	v_1 := v.Args[1] // mem
	v_0 := v.Args[0] // address
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHreg applies the generated rewrite rules for
// MOVHreg (sign-extend from 16 bits): it removes the extension when the operand
// is provably already sign-extended (small non-negative ANDI masks, narrower
// loads/extensions), constant-folds MOVDconst operands, and converts a
// MOVHUload feeding a single-use MOVHreg into a direct MOVHload placed in the
// load's own block (the `@x.Block` form). Rules are tried in order; the first
// match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's block (not v's), per the
		// @x.Block directive in the rule result.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore applies the generated rewrite rules
// for MOVHstore (16-bit store): it folds MOVaddr/ADDI address computations into
// the store's offset/symbol, turns stores of constant zero into MOVHstorezero,
// and drops redundant extensions (MOVHreg/MOVWreg/MOVHUreg/MOVWUreg) on the
// stored value, since only the low 16 bits are written anyway. Rules are tried
// in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstorezero applies the generated rewrite
// rules for MOVHstorezero (store of constant zero halfword): it folds a
// MOVaddr or ADDI address computation into the store's offset/symbol aux
// fields. Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUload applies the generated rewrite rules
// for MOVWUload (zero-extending 32-bit load): it folds a MOVaddr or ADDI
// address computation into the load's offset/symbol aux fields. Rules are
// tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg applies the generated rewrite rules for
// MOVWUreg (zero-extend from 32 bits): it removes the extension when the
// operand is provably already zero-extended (non-negative 32-bit ANDI masks,
// narrower unsigned loads/extensions), rewrites a negative-mask ANDI into an
// AND with a materialized 32-bit constant, constant-folds MOVDconst operands,
// and converts a MOVWload feeding a single-use MOVWUreg into a direct
// MOVWUload placed in the load's own block (the `@x.Block` form). Rules are
// tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (MOVWUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (ANDI [c] x))
	// cond: c < 0
	// result: (AND (MOVDconst [int64(uint32(c))]) x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(v0, x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's block (not v's), per the
		// @x.Block directive in the rule result.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload applies the generated rewrite rules for
// MOVWload (sign-extending 32-bit load): it folds a MOVaddr or ADDI address
// computation into the load's offset/symbol aux fields. Rules are tried in
// order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg applies the generated rewrite rules for
// MOVWreg (sign-extend from 32 bits): it removes the extension when the operand
// is provably already sign-extended — small non-negative ANDI masks, narrower
// loads/extensions, and the RV64 *W instructions (ADDIW, SUBW, NEGW, MULW,
// DIVW, DIVUW, REMW, REMUW, ROLW, RORW, RORIW), which always produce a
// sign-extended 32-bit result. It also narrows NEG to NEGW, constant-folds
// MOVDconst operands, and converts a MOVWUload feeding a single-use MOVWreg
// into a direct MOVWload placed in the load's own block (the `@x.Block` form).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg (NEG x))
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ADDIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ADDIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(SUBW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64SUBW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(NEGW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64NEGW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MULW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MULW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ROLW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ROLW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's block (not v's), per the
		// @x.Block directive in the rule result.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore applies the generated rewrite rules
// for MOVWstore (32-bit store): it folds MOVaddr/ADDI address computations
// into the store's offset/symbol, turns stores of constant zero into
// MOVWstorezero, and drops redundant MOVWreg/MOVWUreg extensions on the
// stored value, since only the low 32 bits are written anyway. Rules are
// tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero applies the generated rewrite
// rules for MOVWstorezero (store of constant zero word): it folds a MOVaddr
// or ADDI address computation into the store's offset/symbol aux fields.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEG applies the generated rewrite rules for NEG
// (64-bit negation): it flips the operand order of a negated SUB, cancels
// double negation, distributes negation over single-use ADDI-of-SUB and
// ADDI-of-NEG (negating the immediate, guarded by is32Bit(-val)), and
// constant-folds a negated MOVDconst. Rules are tried in order; the first
// match rewrites v and returns true.
func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEG (SUB x y))
	// result: (SUB y x)
	for {
		if v_0.Op != OpRISCV64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpRISCV64SUB)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] (SUB <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64SUB {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (NEG x)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64NEG {
			break
		}
		x := s_0.Args[0]
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (NEG (MOVDconst [x]))
	// result: (MOVDconst [-x])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEGW applies the generated rewrite rules for
// (NEGW ...) values, folding a constant operand with 32-bit wraparound
// (int64(int32(-x)) sign-extends the low 32 bits, matching NEGW semantics).
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGW (MOVDconst [x]))
	// result: (MOVDconst [int64(int32(-x))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64OR applies the generated rewrite rules for
// (OR ...) values. The inner two-iteration loop tries both operand orders,
// since OR is commutative. Generated from _gen/RISCV64.rules — edit the
// rules file, not this code.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORI applies the generated rewrite rules for
// (ORI ...) values: identity (|0), absorption (|-1), constant folding, and
// collapsing nested ORIs into one immediate. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORI [-1] x)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORI [x] (MOVDconst [y]))
	// result: (MOVDconst [x | y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x | y)
		return true
	}
	// match: (ORI [x] (ORI [y] z))
	// result: (ORI [x | y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ORI)
		v.AuxInt = int64ToAuxInt(x | y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROL applies the generated rewrite rules for
// (ROL ...) values, turning rotate-left into the immediate or register
// rotate-right forms. Generated from _gen/RISCV64.rules — edit the rules
// file, not this code.
func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROL x (MOVDconst [val]))
	// result: (RORI [int64(int8(-val)&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		// Rotate left by val == rotate right by -val (mod 64). The &63 keeps
		// only the low six bits, which the int8 conversion preserves.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
		v.AddArg(x)
		return true
	}
	// match: (ROL x (NEG y))
	// result: (ROR x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ROR)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROLW applies the generated rewrite rules for
// (ROLW ...) values — the 32-bit analogue of the ROL rules, masking the
// rotate amount to five bits. Generated from _gen/RISCV64.rules — edit the
// rules file, not this code.
func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROLW x (MOVDconst [val]))
	// result: (RORIW [int64(int8(-val)&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		// Rotate left by val == rotate right by -val (mod 32); &31 keeps the
		// low five bits, unaffected by the int8 conversion.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
		v.AddArg(x)
		return true
	}
	// match: (ROLW x (NEG y))
	// result: (RORW x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64RORW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROR applies the generated rewrite rule for
// (ROR ...) values, folding a constant rotate amount (mod 64) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [val]))
	// result: (RORI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64RORW applies the generated rewrite rule for
// (RORW ...) values, folding a constant rotate amount (mod 32) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [val]))
	// result: (RORIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SEQZ applies the generated rewrite rules for
// (SEQZ ...) values: NEG under SEQZ is irrelevant (x == 0 iff -x == 0), and
// stacked SEQZ/SNEZ collapse since their operand is already 0 or 1.
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SEQZ (NEG x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SEQZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SNEZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLL applies the generated rewrite rule for
// (SLL ...) values, folding a constant shift amount (mod 64) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [val]))
	// result: (SLLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLI applies the generated rewrite rule for
// (SLLI ...) values, folding a constant operand when the shifted result
// still fits in 32 bits (the is32Bit guard). Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLI [x] (MOVDconst [y]))
	// cond: is32Bit(y << uint32(x))
	// result: (MOVDconst [y << uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(y << uint32(x))) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(y << uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLW applies the generated rewrite rule for
// (SLLW ...) values, folding a constant shift amount (mod 32) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLW x (MOVDconst [val]))
	// result: (SLLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLT applies the generated rewrite rules for
// (SLT ...) values: use the immediate form when the constant fits the
// 12-bit signed immediate range (-2048..2047), and fold x<x to 0.
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLT x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTI [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTI)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTI applies the generated rewrite rules for
// (SLTI ...) values: fold constant operands, and decide the comparison from
// value-range facts — a nonnegative ANDI mask bounds the operand above, a
// nonnegative ORI immediate bounds it below. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTI [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(int64(y) < int64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
		return true
	}
	// match: (SLTI [x] (ANDI [y] _))
	// cond: y >= 0 && int64(y) < int64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) < int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTI [x] (ORI [y] _))
	// cond: y >= 0 && int64(y) >= int64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) >= int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTIU applies the generated rewrite rules
// for (SLTIU ...) values — the unsigned counterpart of the SLTI rules,
// with the same constant-fold and ANDI/ORI range reasoning. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTIU [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
		return true
	}
	// match: (SLTIU [x] (ANDI [y] _))
	// cond: y >= 0 && uint64(y) < uint64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) < uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTIU [x] (ORI [y] _))
	// cond: y >= 0 && uint64(y) >= uint64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) >= uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTU applies the generated rewrite rules for
// (SLTU ...) values: use the immediate form when the constant fits the
// 12-bit signed immediate range, and fold x<x to 0. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLTU x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTIU [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTIU)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SNEZ applies the generated rewrite rules for
// (SNEZ ...) values — the mirror of the SEQZ rules: NEG is irrelevant to a
// zero test, and stacked SEQZ/SNEZ collapse. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA applies the generated rewrite rule for
// (SRA ...) values, folding a constant shift amount (mod 64) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI applies the generated rewrite rules for
// (SRAI ...) values: narrow shifts of sign-extended operands to the W form,
// reduce over-wide shifts of narrow sign-extensions to a sign-bit
// broadcast, and fold constants. Generated from _gen/RISCV64.rules — edit
// the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		// Shifting an 8-bit sign-extension right by >= 8 leaves only copies
		// of its sign bit: move that bit to bit 63, then arithmetic-shift
		// it across the whole word.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW applies the generated rewrite rule for
// (SRAW ...) values, folding a constant shift amount (mod 32) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL applies the generated rewrite rule for
// (SRL ...) values, folding a constant shift amount (mod 64) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI applies the generated rewrite rules for
// (SRLI ...) values: narrow shifts of zero-extended operands to the W form,
// reduce over-wide shifts of narrow zero-extensions to the constant 0, and
// fold constants. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRLIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRLI <t> [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		// Logically shifting an 8-bit zero-extension right by >= 8 shifts
		// out every set bit, so the result is always zero.
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW applies the generated rewrite rule for
// (SRLW ...) values, folding a constant shift amount (mod 32) into the
// immediate form. Generated from _gen/RISCV64.rules — edit the rules file,
// not this code.
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB applies the generated rewrite rules for
// (SUB ...) values: turn subtraction of a constant into ADDI with the
// negated immediate, rewrite const-minus-x as NEG(ADDI), and handle the
// zero-operand identities. Generated from _gen/RISCV64.rules — edit the
// rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		// val - y == -(y - val) == -(y + (-val)).
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW applies the generated rewrite rules for
// (SUBW ...) values. Note x - 0 becomes (ADDIW [0] x), not plain x: SUBW
// produces a sign-extended 32-bit result, and ADDIW [0] preserves that
// sign-extension where x alone would not. Generated from _gen/RISCV64.rules
// — edit the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR applies the generated rewrite rules for
// (XOR ...) values. The inner two-iteration loop tries both operand orders,
// since XOR is commutative. Generated from _gen/RISCV64.rules — edit the
// rules file, not this code.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers the generic RotateLeft16 op to
// RISCV64 ops: (x << (y&15)) | (zext16(x) >> ((-y)&15)). This rule is
// unconditional, so the function always rewrites and returns true.
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x y)
	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(15)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(15)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRotateLeft8 lowers the generic RotateLeft8 op to
// RISCV64 ops: (x << (y&7)) | (zext8(x) >> ((-y)&7)). This rule is
// unconditional, so the function always rewrites and returns true.
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x y)
	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers the generic Rsh16Ux16 (unsigned
// 16-bit right shift by a 16-bit amount). When the shift is not proven
// bounded, the result is masked by Neg16(SLTIU [64] ...) — all-ones when
// the amount is < 64, zero otherwise — to give Go's shift semantics
// (over-shift yields 0). Generated from _gen/RISCV64.rules — edit the
// rules file, not this code.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers the generic Rsh16Ux32 (unsigned
// 16-bit right shift by a 32-bit amount); same scheme as Rsh16Ux16 but the
// shift amount is zero-extended from 32 bits for the bound check.
// Generated from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers the generic Rsh16Ux64 (unsigned
// 16-bit right shift by a 64-bit amount); same scheme as Rsh16Ux16 but the
// amount needs no extension for the bound check. Generated from
// _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers the generic Rsh16Ux8 (unsigned
// 16-bit right shift by an 8-bit amount); same scheme as Rsh16Ux16 but the
// shift amount is zero-extended from 8 bits for the bound check. Generated
// from _gen/RISCV64.rules — edit the rules file, not this code.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers Rsh16x16 (signed right shift of a
// 16-bit value by a 16-bit amount). For a possibly out-of-range shift the
// amount is clamped rather than masked: ADDI [-1] of the SLTIU result is 0
// when the zero-extended amount is < 64 and -1 otherwise; OR-ing that into y
// forces oversized amounts to all ones, so SRA saturates and fills the result
// with the sign bit, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers Rsh16x32 (signed right shift of a
// 16-bit value by a 32-bit amount). Out-of-range amounts are clamped to all
// ones via OR with (ADDI [-1] (SLTIU [64] amount)), so SRA saturates and
// produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers Rsh16x64 (signed right shift of a
// 16-bit value by a 64-bit amount). The amount needs no widening here;
// out-of-range amounts are clamped to all ones via OR with
// (ADDI [-1] (SLTIU [64] y)), so SRA saturates and produces sign bits.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers Rsh16x8 (signed right shift of a
// 16-bit value by an 8-bit amount). Out-of-range amounts are clamped to all
// ones via OR with (ADDI [-1] (SLTIU [64] amount)), so SRA saturates and
// produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers Rsh32Ux16 (unsigned right shift of a
// 32-bit value by a 16-bit amount) using the 32-bit word shift SRLW, so the
// bound is 32 rather than 64. SLTIU [32] yields 1 iff the zero-extended
// amount is < 32; Neg32 turns that into an all-ones mask, so oversized
// shifts produce 0.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers Rsh32Ux32 (unsigned right shift of a
// 32-bit value by a 32-bit amount) using SRLW with a 32 bound. The shifted
// value is ANDed with (Neg32 (SLTIU [32] amount)), an all-ones mask exactly
// when the zero-extended amount is < 32, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers Rsh32Ux64 (unsigned right shift of a
// 32-bit value by a 64-bit amount) using SRLW with a 32 bound. The 64-bit
// amount needs no widening; the result is masked to 0 for amounts >= 32 via
// AND with (Neg32 (SLTIU [32] y)).
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers Rsh32Ux8 (unsigned right shift of a
// 32-bit value by an 8-bit amount) using SRLW with a 32 bound. The shifted
// value is ANDed with (Neg32 (SLTIU [32] amount)), an all-ones mask exactly
// when the zero-extended amount is < 32, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers Rsh32x16 (signed right shift of a
// 32-bit value by a 16-bit amount) using the 32-bit word shift SRAW. For a
// possibly out-of-range shift the amount is clamped: ADDI [-1] of the SLTIU
// [32] result is 0 when the amount is < 32 and -1 otherwise; OR-ing that into
// y forces oversized amounts to all ones, so SRAW saturates to sign bits.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers Rsh32x32 (signed right shift of a
// 32-bit value by a 32-bit amount) using SRAW. Out-of-range amounts are
// clamped to all ones via OR with (ADDI [-1] (SLTIU [32] amount)), so SRAW
// saturates and produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers Rsh32x64 (signed right shift of a
// 32-bit value by a 64-bit amount) using SRAW. The amount needs no widening;
// out-of-range amounts are clamped to all ones via OR with
// (ADDI [-1] (SLTIU [32] y)), so SRAW saturates and produces sign bits.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers Rsh32x8 (signed right shift of a
// 32-bit value by an 8-bit amount) using SRAW. Out-of-range amounts are
// clamped to all ones via OR with (ADDI [-1] (SLTIU [32] amount)), so SRAW
// saturates and produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers Rsh64Ux16 (unsigned right shift of a
// 64-bit value by a 16-bit amount). The shifted value is ANDed with
// (Neg64 (SLTIU [64] amount)), an all-ones mask exactly when the
// zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers Rsh64Ux32 (unsigned right shift of a
// 64-bit value by a 32-bit amount). The shifted value is ANDed with
// (Neg64 (SLTIU [64] amount)), an all-ones mask exactly when the
// zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64 (unsigned right shift of a
// 64-bit value by a 64-bit amount). The amount needs no widening; the result
// is masked to 0 for amounts >= 64 via AND with (Neg64 (SLTIU [64] y)).
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8 (unsigned right shift of a
// 64-bit value by an 8-bit amount). The shifted value is ANDed with
// (Neg64 (SLTIU [64] amount)), an all-ones mask exactly when the
// zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16 (signed right shift of a
// 64-bit value by a 16-bit amount). Out-of-range amounts are clamped: ADDI
// [-1] of the SLTIU [64] result is 0 when the zero-extended amount is < 64
// and -1 otherwise; OR-ing that into y forces oversized amounts to all ones,
// so SRA saturates and fills the result with the sign bit.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32 (signed right shift of a
// 64-bit value by a 32-bit amount). Out-of-range amounts are clamped to all
// ones via OR with (ADDI [-1] (SLTIU [64] amount)), so SRA saturates and
// produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64 (signed right shift of a
// 64-bit value by a 64-bit amount). The amount needs no widening;
// out-of-range amounts are clamped to all ones via OR with
// (ADDI [-1] (SLTIU [64] y)), so SRA saturates and produces sign bits.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (signed right shift of a
// 64-bit value by an 8-bit amount). Out-of-range amounts are clamped to all
// ones via OR with (ADDI [-1] (SLTIU [64] amount)), so SRA saturates and
// produces sign bits, matching Go's arithmetic-shift semantics.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (unsigned right shift of an
// 8-bit value by a 16-bit amount). The zero-extended operand is shifted with
// SRL, then ANDed with (Neg8 (SLTIU [64] amount)), an all-ones mask exactly
// when the zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (unsigned right shift of an
// 8-bit value by a 32-bit amount). The zero-extended operand is shifted with
// SRL, then ANDed with (Neg8 (SLTIU [64] amount)), an all-ones mask exactly
// when the zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (unsigned right shift of an
// 8-bit value by a 64-bit amount). The amount needs no widening; the result
// is masked to 0 for amounts >= 64 via AND with (Neg8 (SLTIU [64] y)).
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (unsigned right shift of an
// 8-bit value by an 8-bit amount). The zero-extended operand is shifted with
// SRL, then ANDed with (Neg8 (SLTIU [64] amount)), an all-ones mask exactly
// when the zero-extended amount is < 64, so oversized shifts produce 0.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (signed right shift of an
// 8-bit value by a 16-bit shift amount). For unbounded shifts the amount is
// ORed with (SLTIU [64] y)-1: that term is 0 when y < 64 (leaving y intact)
// and all-ones otherwise, which saturates the SRA amount so out-of-range
// shifts fill with the sign bit.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (signed right shift of an
// 8-bit value by a 32-bit shift amount). Identical in structure to the
// Rsh8x16 lowering, except the shift amount is zero-extended from 32 bits
// for the SLTIU range check.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (signed right shift of an
// 8-bit value by a 64-bit shift amount). The amount already occupies a full
// 64-bit value, so no zero-extension is needed before the SLTIU range check;
// otherwise this mirrors the other signed Rsh8 lowerings.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (signed right shift of an
// 8-bit value by an 8-bit shift amount). Identical in structure to the other
// signed Rsh8 lowerings, with the shift amount zero-extended from 8 bits for
// the SLTIU range check.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result of a
// multi-result op): the sum for Add64carry, the difference for Sub64borrow,
// and the high word (MULHU) of a single-use LoweredMuluhilo.
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only when this Select0 is the sole use: otherwise the paired
		// Select1 still needs the combined LoweredMuluhilo result.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect1 lowers Select1 (the second result of a
// multi-result op): the carry bit for Add64carry and the borrow bit for
// Sub64borrow, each computed from two SLTU comparisons ORed together, and
// the low word (MUL) of a single-use LoweredMuluhilo.
func rewriteValueRISCV64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Add64carry x y c))
	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		// Carry out of x+y when the sum wraps below x; carry out of s+c
		// when that second sum wraps below s. At most one can be set.
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (Sub64borrow x y c))
	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MUL x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask: negate x and
// arithmetic-shift right by 63, yielding all ones when x > 0 and zero when
// x == 0. This rewrite always fires, hence the unconditional return.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAI [63] (NEG <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpStore lowers a generic Store to the width-specific
// RISCV64 store op selected by the stored type's size (1/2/4/8 bytes), with
// separate FMOVWstore/FMOVDstore lowerings for 4- and 8-byte floats.
func rewriteValueRISCV64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (FMOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpZero lowers Zero [s] {t} — zeroing s bytes at ptr —
// by size and alignment. Small sizes unroll into chains of constant-zero
// stores at the widest width the type's alignment allows; 8-byte-aligned
// sizes up to 8*128 use DUFFZERO; everything else falls through to the
// generic LoweredZero loop, whose second argument is the end address
// ptr + (s - moveSize(alignment)).
func rewriteValueRISCV64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [32] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(8)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64DUFFZERO)
		// The AuxInt is the byte offset to jump to inside the Duff's-device
		// routine: larger s means entering earlier to emit more stores.
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg2(ptr, v1)
		v.AddArg3(ptr, v0, mem)
		return true
	}
}
  9249  func rewriteBlockRISCV64(b *Block) bool {
  9250  	typ := &b.Func.Config.Types
  9251  	switch b.Kind {
  9252  	case BlockRISCV64BEQ:
  9253  		// match: (BEQ (MOVDconst [0]) cond yes no)
  9254  		// result: (BEQZ cond yes no)
  9255  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9256  			v_0 := b.Controls[0]
  9257  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9258  				break
  9259  			}
  9260  			cond := b.Controls[1]
  9261  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9262  			return true
  9263  		}
  9264  		// match: (BEQ cond (MOVDconst [0]) yes no)
  9265  		// result: (BEQZ cond yes no)
  9266  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9267  			cond := b.Controls[0]
  9268  			v_1 := b.Controls[1]
  9269  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9270  				break
  9271  			}
  9272  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9273  			return true
  9274  		}
  9275  	case BlockRISCV64BEQZ:
  9276  		// match: (BEQZ (SEQZ x) yes no)
  9277  		// result: (BNEZ x yes no)
  9278  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9279  			v_0 := b.Controls[0]
  9280  			x := v_0.Args[0]
  9281  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9282  			return true
  9283  		}
  9284  		// match: (BEQZ (SNEZ x) yes no)
  9285  		// result: (BEQZ x yes no)
  9286  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9287  			v_0 := b.Controls[0]
  9288  			x := v_0.Args[0]
  9289  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9290  			return true
  9291  		}
  9292  		// match: (BEQZ (NEG x) yes no)
  9293  		// result: (BEQZ x yes no)
  9294  		for b.Controls[0].Op == OpRISCV64NEG {
  9295  			v_0 := b.Controls[0]
  9296  			x := v_0.Args[0]
  9297  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9298  			return true
  9299  		}
  9300  		// match: (BEQZ (FNES <t> x y) yes no)
  9301  		// result: (BNEZ (FEQS <t> x y) yes no)
  9302  		for b.Controls[0].Op == OpRISCV64FNES {
  9303  			v_0 := b.Controls[0]
  9304  			t := v_0.Type
  9305  			_ = v_0.Args[1]
  9306  			v_0_0 := v_0.Args[0]
  9307  			v_0_1 := v_0.Args[1]
  9308  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9309  				x := v_0_0
  9310  				y := v_0_1
  9311  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  9312  				v0.AddArg2(x, y)
  9313  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  9314  				return true
  9315  			}
  9316  		}
  9317  		// match: (BEQZ (FNED <t> x y) yes no)
  9318  		// result: (BNEZ (FEQD <t> x y) yes no)
  9319  		for b.Controls[0].Op == OpRISCV64FNED {
  9320  			v_0 := b.Controls[0]
  9321  			t := v_0.Type
  9322  			_ = v_0.Args[1]
  9323  			v_0_0 := v_0.Args[0]
  9324  			v_0_1 := v_0.Args[1]
  9325  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9326  				x := v_0_0
  9327  				y := v_0_1
  9328  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9329  				v0.AddArg2(x, y)
  9330  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  9331  				return true
  9332  			}
  9333  		}
  9334  		// match: (BEQZ (SUB x y) yes no)
  9335  		// result: (BEQ x y yes no)
  9336  		for b.Controls[0].Op == OpRISCV64SUB {
  9337  			v_0 := b.Controls[0]
  9338  			y := v_0.Args[1]
  9339  			x := v_0.Args[0]
  9340  			b.resetWithControl2(BlockRISCV64BEQ, x, y)
  9341  			return true
  9342  		}
  9343  		// match: (BEQZ (SLT x y) yes no)
  9344  		// result: (BGE x y yes no)
  9345  		for b.Controls[0].Op == OpRISCV64SLT {
  9346  			v_0 := b.Controls[0]
  9347  			y := v_0.Args[1]
  9348  			x := v_0.Args[0]
  9349  			b.resetWithControl2(BlockRISCV64BGE, x, y)
  9350  			return true
  9351  		}
  9352  		// match: (BEQZ (SLTU x y) yes no)
  9353  		// result: (BGEU x y yes no)
  9354  		for b.Controls[0].Op == OpRISCV64SLTU {
  9355  			v_0 := b.Controls[0]
  9356  			y := v_0.Args[1]
  9357  			x := v_0.Args[0]
  9358  			b.resetWithControl2(BlockRISCV64BGEU, x, y)
  9359  			return true
  9360  		}
  9361  		// match: (BEQZ (SLTI [x] y) yes no)
  9362  		// result: (BGE y (MOVDconst [x]) yes no)
  9363  		for b.Controls[0].Op == OpRISCV64SLTI {
  9364  			v_0 := b.Controls[0]
  9365  			x := auxIntToInt64(v_0.AuxInt)
  9366  			y := v_0.Args[0]
  9367  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9368  			v0.AuxInt = int64ToAuxInt(x)
  9369  			b.resetWithControl2(BlockRISCV64BGE, y, v0)
  9370  			return true
  9371  		}
  9372  		// match: (BEQZ (SLTIU [x] y) yes no)
  9373  		// result: (BGEU y (MOVDconst [x]) yes no)
  9374  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9375  			v_0 := b.Controls[0]
  9376  			x := auxIntToInt64(v_0.AuxInt)
  9377  			y := v_0.Args[0]
  9378  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9379  			v0.AuxInt = int64ToAuxInt(x)
  9380  			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
  9381  			return true
  9382  		}
  9383  	case BlockRISCV64BGE:
  9384  		// match: (BGE (MOVDconst [0]) cond yes no)
  9385  		// result: (BLEZ cond yes no)
  9386  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9387  			v_0 := b.Controls[0]
  9388  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9389  				break
  9390  			}
  9391  			cond := b.Controls[1]
  9392  			b.resetWithControl(BlockRISCV64BLEZ, cond)
  9393  			return true
  9394  		}
  9395  		// match: (BGE cond (MOVDconst [0]) yes no)
  9396  		// result: (BGEZ cond yes no)
  9397  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9398  			cond := b.Controls[0]
  9399  			v_1 := b.Controls[1]
  9400  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9401  				break
  9402  			}
  9403  			b.resetWithControl(BlockRISCV64BGEZ, cond)
  9404  			return true
  9405  		}
  9406  	case BlockRISCV64BGEU:
  9407  		// match: (BGEU (MOVDconst [0]) cond yes no)
  9408  		// result: (BEQZ cond yes no)
  9409  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9410  			v_0 := b.Controls[0]
  9411  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9412  				break
  9413  			}
  9414  			cond := b.Controls[1]
  9415  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9416  			return true
  9417  		}
  9418  	case BlockRISCV64BLT:
  9419  		// match: (BLT (MOVDconst [0]) cond yes no)
  9420  		// result: (BGTZ cond yes no)
  9421  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9422  			v_0 := b.Controls[0]
  9423  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9424  				break
  9425  			}
  9426  			cond := b.Controls[1]
  9427  			b.resetWithControl(BlockRISCV64BGTZ, cond)
  9428  			return true
  9429  		}
  9430  		// match: (BLT cond (MOVDconst [0]) yes no)
  9431  		// result: (BLTZ cond yes no)
  9432  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9433  			cond := b.Controls[0]
  9434  			v_1 := b.Controls[1]
  9435  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9436  				break
  9437  			}
  9438  			b.resetWithControl(BlockRISCV64BLTZ, cond)
  9439  			return true
  9440  		}
  9441  	case BlockRISCV64BLTU:
  9442  		// match: (BLTU (MOVDconst [0]) cond yes no)
  9443  		// result: (BNEZ cond yes no)
  9444  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9445  			v_0 := b.Controls[0]
  9446  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9447  				break
  9448  			}
  9449  			cond := b.Controls[1]
  9450  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9451  			return true
  9452  		}
  9453  	case BlockRISCV64BNE:
  9454  		// match: (BNE (MOVDconst [0]) cond yes no)
  9455  		// result: (BNEZ cond yes no)
  9456  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9457  			v_0 := b.Controls[0]
  9458  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9459  				break
  9460  			}
  9461  			cond := b.Controls[1]
  9462  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9463  			return true
  9464  		}
  9465  		// match: (BNE cond (MOVDconst [0]) yes no)
  9466  		// result: (BNEZ cond yes no)
  9467  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9468  			cond := b.Controls[0]
  9469  			v_1 := b.Controls[1]
  9470  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9471  				break
  9472  			}
  9473  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9474  			return true
  9475  		}
  9476  	case BlockRISCV64BNEZ:
  9477  		// match: (BNEZ (SEQZ x) yes no)
  9478  		// result: (BEQZ x yes no)
  9479  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9480  			v_0 := b.Controls[0]
  9481  			x := v_0.Args[0]
  9482  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9483  			return true
  9484  		}
  9485  		// match: (BNEZ (SNEZ x) yes no)
  9486  		// result: (BNEZ x yes no)
  9487  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9488  			v_0 := b.Controls[0]
  9489  			x := v_0.Args[0]
  9490  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9491  			return true
  9492  		}
  9493  		// match: (BNEZ (NEG x) yes no)
  9494  		// result: (BNEZ x yes no)
  9495  		for b.Controls[0].Op == OpRISCV64NEG {
  9496  			v_0 := b.Controls[0]
  9497  			x := v_0.Args[0]
  9498  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9499  			return true
  9500  		}
  9501  		// match: (BNEZ (FNES <t> x y) yes no)
  9502  		// result: (BEQZ (FEQS <t> x y) yes no)
  9503  		for b.Controls[0].Op == OpRISCV64FNES {
  9504  			v_0 := b.Controls[0]
  9505  			t := v_0.Type
  9506  			_ = v_0.Args[1]
  9507  			v_0_0 := v_0.Args[0]
  9508  			v_0_1 := v_0.Args[1]
  9509  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9510  				x := v_0_0
  9511  				y := v_0_1
  9512  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  9513  				v0.AddArg2(x, y)
  9514  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9515  				return true
  9516  			}
  9517  		}
  9518  		// match: (BNEZ (FNED <t> x y) yes no)
  9519  		// result: (BEQZ (FEQD <t> x y) yes no)
  9520  		for b.Controls[0].Op == OpRISCV64FNED {
  9521  			v_0 := b.Controls[0]
  9522  			t := v_0.Type
  9523  			_ = v_0.Args[1]
  9524  			v_0_0 := v_0.Args[0]
  9525  			v_0_1 := v_0.Args[1]
  9526  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9527  				x := v_0_0
  9528  				y := v_0_1
  9529  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9530  				v0.AddArg2(x, y)
  9531  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9532  				return true
  9533  			}
  9534  		}
  9535  		// match: (BNEZ (SUB x y) yes no)
  9536  		// result: (BNE x y yes no)
  9537  		for b.Controls[0].Op == OpRISCV64SUB {
  9538  			v_0 := b.Controls[0]
  9539  			y := v_0.Args[1]
  9540  			x := v_0.Args[0]
  9541  			b.resetWithControl2(BlockRISCV64BNE, x, y)
  9542  			return true
  9543  		}
  9544  		// match: (BNEZ (SLT x y) yes no)
  9545  		// result: (BLT x y yes no)
  9546  		for b.Controls[0].Op == OpRISCV64SLT {
  9547  			v_0 := b.Controls[0]
  9548  			y := v_0.Args[1]
  9549  			x := v_0.Args[0]
  9550  			b.resetWithControl2(BlockRISCV64BLT, x, y)
  9551  			return true
  9552  		}
  9553  		// match: (BNEZ (SLTU x y) yes no)
  9554  		// result: (BLTU x y yes no)
  9555  		for b.Controls[0].Op == OpRISCV64SLTU {
  9556  			v_0 := b.Controls[0]
  9557  			y := v_0.Args[1]
  9558  			x := v_0.Args[0]
  9559  			b.resetWithControl2(BlockRISCV64BLTU, x, y)
  9560  			return true
  9561  		}
  9562  		// match: (BNEZ (SLTI [x] y) yes no)
  9563  		// result: (BLT y (MOVDconst [x]) yes no)
  9564  		for b.Controls[0].Op == OpRISCV64SLTI {
  9565  			v_0 := b.Controls[0]
  9566  			x := auxIntToInt64(v_0.AuxInt)
  9567  			y := v_0.Args[0]
  9568  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9569  			v0.AuxInt = int64ToAuxInt(x)
  9570  			b.resetWithControl2(BlockRISCV64BLT, y, v0)
  9571  			return true
  9572  		}
  9573  		// match: (BNEZ (SLTIU [x] y) yes no)
  9574  		// result: (BLTU y (MOVDconst [x]) yes no)
  9575  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9576  			v_0 := b.Controls[0]
  9577  			x := auxIntToInt64(v_0.AuxInt)
  9578  			y := v_0.Args[0]
  9579  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9580  			v0.AuxInt = int64ToAuxInt(x)
  9581  			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
  9582  			return true
  9583  		}
  9584  	case BlockIf:
  9585  		// match: (If cond yes no)
  9586  		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
  9587  		for {
  9588  			cond := b.Controls[0]
  9589  			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
  9590  			v0.AddArg(cond)
  9591  			b.resetWithControl(BlockRISCV64BNEZ, v0)
  9592  			return true
  9593  		}
  9594  	}
  9595  	return false
  9596  }
  9597  
