Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "math"
     6  import "cmd/compile/internal/types"
     7  
     8  func rewriteValueRISCV64(v *Value) bool {
     9  	switch v.Op {
    10  	case OpAbs:
    11  		v.Op = OpRISCV64FABSD
    12  		return true
    13  	case OpAdd16:
    14  		v.Op = OpRISCV64ADD
    15  		return true
    16  	case OpAdd32:
    17  		v.Op = OpRISCV64ADD
    18  		return true
    19  	case OpAdd32F:
    20  		v.Op = OpRISCV64FADDS
    21  		return true
    22  	case OpAdd64:
    23  		v.Op = OpRISCV64ADD
    24  		return true
    25  	case OpAdd64F:
    26  		v.Op = OpRISCV64FADDD
    27  		return true
    28  	case OpAdd8:
    29  		v.Op = OpRISCV64ADD
    30  		return true
    31  	case OpAddPtr:
    32  		v.Op = OpRISCV64ADD
    33  		return true
    34  	case OpAddr:
    35  		return rewriteValueRISCV64_OpAddr(v)
    36  	case OpAnd16:
    37  		v.Op = OpRISCV64AND
    38  		return true
    39  	case OpAnd32:
    40  		v.Op = OpRISCV64AND
    41  		return true
    42  	case OpAnd64:
    43  		v.Op = OpRISCV64AND
    44  		return true
    45  	case OpAnd8:
    46  		v.Op = OpRISCV64AND
    47  		return true
    48  	case OpAndB:
    49  		v.Op = OpRISCV64AND
    50  		return true
    51  	case OpAtomicAdd32:
    52  		v.Op = OpRISCV64LoweredAtomicAdd32
    53  		return true
    54  	case OpAtomicAdd64:
    55  		v.Op = OpRISCV64LoweredAtomicAdd64
    56  		return true
    57  	case OpAtomicAnd32:
    58  		v.Op = OpRISCV64LoweredAtomicAnd32
    59  		return true
    60  	case OpAtomicAnd8:
    61  		return rewriteValueRISCV64_OpAtomicAnd8(v)
    62  	case OpAtomicCompareAndSwap32:
    63  		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
    64  	case OpAtomicCompareAndSwap64:
    65  		v.Op = OpRISCV64LoweredAtomicCas64
    66  		return true
    67  	case OpAtomicExchange32:
    68  		v.Op = OpRISCV64LoweredAtomicExchange32
    69  		return true
    70  	case OpAtomicExchange64:
    71  		v.Op = OpRISCV64LoweredAtomicExchange64
    72  		return true
    73  	case OpAtomicLoad32:
    74  		v.Op = OpRISCV64LoweredAtomicLoad32
    75  		return true
    76  	case OpAtomicLoad64:
    77  		v.Op = OpRISCV64LoweredAtomicLoad64
    78  		return true
    79  	case OpAtomicLoad8:
    80  		v.Op = OpRISCV64LoweredAtomicLoad8
    81  		return true
    82  	case OpAtomicLoadPtr:
    83  		v.Op = OpRISCV64LoweredAtomicLoad64
    84  		return true
    85  	case OpAtomicOr32:
    86  		v.Op = OpRISCV64LoweredAtomicOr32
    87  		return true
    88  	case OpAtomicOr8:
    89  		return rewriteValueRISCV64_OpAtomicOr8(v)
    90  	case OpAtomicStore32:
    91  		v.Op = OpRISCV64LoweredAtomicStore32
    92  		return true
    93  	case OpAtomicStore64:
    94  		v.Op = OpRISCV64LoweredAtomicStore64
    95  		return true
    96  	case OpAtomicStore8:
    97  		v.Op = OpRISCV64LoweredAtomicStore8
    98  		return true
    99  	case OpAtomicStorePtrNoWB:
   100  		v.Op = OpRISCV64LoweredAtomicStore64
   101  		return true
   102  	case OpAvg64u:
   103  		return rewriteValueRISCV64_OpAvg64u(v)
   104  	case OpClosureCall:
   105  		v.Op = OpRISCV64CALLclosure
   106  		return true
   107  	case OpCom16:
   108  		v.Op = OpRISCV64NOT
   109  		return true
   110  	case OpCom32:
   111  		v.Op = OpRISCV64NOT
   112  		return true
   113  	case OpCom64:
   114  		v.Op = OpRISCV64NOT
   115  		return true
   116  	case OpCom8:
   117  		v.Op = OpRISCV64NOT
   118  		return true
   119  	case OpConst16:
   120  		return rewriteValueRISCV64_OpConst16(v)
   121  	case OpConst32:
   122  		return rewriteValueRISCV64_OpConst32(v)
   123  	case OpConst32F:
   124  		return rewriteValueRISCV64_OpConst32F(v)
   125  	case OpConst64:
   126  		return rewriteValueRISCV64_OpConst64(v)
   127  	case OpConst64F:
   128  		return rewriteValueRISCV64_OpConst64F(v)
   129  	case OpConst8:
   130  		return rewriteValueRISCV64_OpConst8(v)
   131  	case OpConstBool:
   132  		return rewriteValueRISCV64_OpConstBool(v)
   133  	case OpConstNil:
   134  		return rewriteValueRISCV64_OpConstNil(v)
   135  	case OpCopysign:
   136  		v.Op = OpRISCV64FSGNJD
   137  		return true
   138  	case OpCvt32Fto32:
   139  		v.Op = OpRISCV64FCVTWS
   140  		return true
   141  	case OpCvt32Fto64:
   142  		v.Op = OpRISCV64FCVTLS
   143  		return true
   144  	case OpCvt32Fto64F:
   145  		v.Op = OpRISCV64FCVTDS
   146  		return true
   147  	case OpCvt32to32F:
   148  		v.Op = OpRISCV64FCVTSW
   149  		return true
   150  	case OpCvt32to64F:
   151  		v.Op = OpRISCV64FCVTDW
   152  		return true
   153  	case OpCvt64Fto32:
   154  		v.Op = OpRISCV64FCVTWD
   155  		return true
   156  	case OpCvt64Fto32F:
   157  		v.Op = OpRISCV64FCVTSD
   158  		return true
   159  	case OpCvt64Fto64:
   160  		v.Op = OpRISCV64FCVTLD
   161  		return true
   162  	case OpCvt64to32F:
   163  		v.Op = OpRISCV64FCVTSL
   164  		return true
   165  	case OpCvt64to64F:
   166  		v.Op = OpRISCV64FCVTDL
   167  		return true
   168  	case OpCvtBoolToUint8:
   169  		v.Op = OpCopy
   170  		return true
   171  	case OpDiv16:
   172  		return rewriteValueRISCV64_OpDiv16(v)
   173  	case OpDiv16u:
   174  		return rewriteValueRISCV64_OpDiv16u(v)
   175  	case OpDiv32:
   176  		return rewriteValueRISCV64_OpDiv32(v)
   177  	case OpDiv32F:
   178  		v.Op = OpRISCV64FDIVS
   179  		return true
   180  	case OpDiv32u:
   181  		v.Op = OpRISCV64DIVUW
   182  		return true
   183  	case OpDiv64:
   184  		return rewriteValueRISCV64_OpDiv64(v)
   185  	case OpDiv64F:
   186  		v.Op = OpRISCV64FDIVD
   187  		return true
   188  	case OpDiv64u:
   189  		v.Op = OpRISCV64DIVU
   190  		return true
   191  	case OpDiv8:
   192  		return rewriteValueRISCV64_OpDiv8(v)
   193  	case OpDiv8u:
   194  		return rewriteValueRISCV64_OpDiv8u(v)
   195  	case OpEq16:
   196  		return rewriteValueRISCV64_OpEq16(v)
   197  	case OpEq32:
   198  		return rewriteValueRISCV64_OpEq32(v)
   199  	case OpEq32F:
   200  		v.Op = OpRISCV64FEQS
   201  		return true
   202  	case OpEq64:
   203  		return rewriteValueRISCV64_OpEq64(v)
   204  	case OpEq64F:
   205  		v.Op = OpRISCV64FEQD
   206  		return true
   207  	case OpEq8:
   208  		return rewriteValueRISCV64_OpEq8(v)
   209  	case OpEqB:
   210  		return rewriteValueRISCV64_OpEqB(v)
   211  	case OpEqPtr:
   212  		return rewriteValueRISCV64_OpEqPtr(v)
   213  	case OpFMA:
   214  		v.Op = OpRISCV64FMADDD
   215  		return true
   216  	case OpGetCallerPC:
   217  		v.Op = OpRISCV64LoweredGetCallerPC
   218  		return true
   219  	case OpGetCallerSP:
   220  		v.Op = OpRISCV64LoweredGetCallerSP
   221  		return true
   222  	case OpGetClosurePtr:
   223  		v.Op = OpRISCV64LoweredGetClosurePtr
   224  		return true
   225  	case OpHmul32:
   226  		return rewriteValueRISCV64_OpHmul32(v)
   227  	case OpHmul32u:
   228  		return rewriteValueRISCV64_OpHmul32u(v)
   229  	case OpHmul64:
   230  		v.Op = OpRISCV64MULH
   231  		return true
   232  	case OpHmul64u:
   233  		v.Op = OpRISCV64MULHU
   234  		return true
   235  	case OpInterCall:
   236  		v.Op = OpRISCV64CALLinter
   237  		return true
   238  	case OpIsInBounds:
   239  		v.Op = OpLess64U
   240  		return true
   241  	case OpIsNonNil:
   242  		v.Op = OpRISCV64SNEZ
   243  		return true
   244  	case OpIsSliceInBounds:
   245  		v.Op = OpLeq64U
   246  		return true
   247  	case OpLeq16:
   248  		return rewriteValueRISCV64_OpLeq16(v)
   249  	case OpLeq16U:
   250  		return rewriteValueRISCV64_OpLeq16U(v)
   251  	case OpLeq32:
   252  		return rewriteValueRISCV64_OpLeq32(v)
   253  	case OpLeq32F:
   254  		v.Op = OpRISCV64FLES
   255  		return true
   256  	case OpLeq32U:
   257  		return rewriteValueRISCV64_OpLeq32U(v)
   258  	case OpLeq64:
   259  		return rewriteValueRISCV64_OpLeq64(v)
   260  	case OpLeq64F:
   261  		v.Op = OpRISCV64FLED
   262  		return true
   263  	case OpLeq64U:
   264  		return rewriteValueRISCV64_OpLeq64U(v)
   265  	case OpLeq8:
   266  		return rewriteValueRISCV64_OpLeq8(v)
   267  	case OpLeq8U:
   268  		return rewriteValueRISCV64_OpLeq8U(v)
   269  	case OpLess16:
   270  		return rewriteValueRISCV64_OpLess16(v)
   271  	case OpLess16U:
   272  		return rewriteValueRISCV64_OpLess16U(v)
   273  	case OpLess32:
   274  		return rewriteValueRISCV64_OpLess32(v)
   275  	case OpLess32F:
   276  		v.Op = OpRISCV64FLTS
   277  		return true
   278  	case OpLess32U:
   279  		return rewriteValueRISCV64_OpLess32U(v)
   280  	case OpLess64:
   281  		v.Op = OpRISCV64SLT
   282  		return true
   283  	case OpLess64F:
   284  		v.Op = OpRISCV64FLTD
   285  		return true
   286  	case OpLess64U:
   287  		v.Op = OpRISCV64SLTU
   288  		return true
   289  	case OpLess8:
   290  		return rewriteValueRISCV64_OpLess8(v)
   291  	case OpLess8U:
   292  		return rewriteValueRISCV64_OpLess8U(v)
   293  	case OpLoad:
   294  		return rewriteValueRISCV64_OpLoad(v)
   295  	case OpLocalAddr:
   296  		return rewriteValueRISCV64_OpLocalAddr(v)
   297  	case OpLsh16x16:
   298  		return rewriteValueRISCV64_OpLsh16x16(v)
   299  	case OpLsh16x32:
   300  		return rewriteValueRISCV64_OpLsh16x32(v)
   301  	case OpLsh16x64:
   302  		return rewriteValueRISCV64_OpLsh16x64(v)
   303  	case OpLsh16x8:
   304  		return rewriteValueRISCV64_OpLsh16x8(v)
   305  	case OpLsh32x16:
   306  		return rewriteValueRISCV64_OpLsh32x16(v)
   307  	case OpLsh32x32:
   308  		return rewriteValueRISCV64_OpLsh32x32(v)
   309  	case OpLsh32x64:
   310  		return rewriteValueRISCV64_OpLsh32x64(v)
   311  	case OpLsh32x8:
   312  		return rewriteValueRISCV64_OpLsh32x8(v)
   313  	case OpLsh64x16:
   314  		return rewriteValueRISCV64_OpLsh64x16(v)
   315  	case OpLsh64x32:
   316  		return rewriteValueRISCV64_OpLsh64x32(v)
   317  	case OpLsh64x64:
   318  		return rewriteValueRISCV64_OpLsh64x64(v)
   319  	case OpLsh64x8:
   320  		return rewriteValueRISCV64_OpLsh64x8(v)
   321  	case OpLsh8x16:
   322  		return rewriteValueRISCV64_OpLsh8x16(v)
   323  	case OpLsh8x32:
   324  		return rewriteValueRISCV64_OpLsh8x32(v)
   325  	case OpLsh8x64:
   326  		return rewriteValueRISCV64_OpLsh8x64(v)
   327  	case OpLsh8x8:
   328  		return rewriteValueRISCV64_OpLsh8x8(v)
   329  	case OpMax32F:
   330  		v.Op = OpRISCV64LoweredFMAXS
   331  		return true
   332  	case OpMax64F:
   333  		v.Op = OpRISCV64LoweredFMAXD
   334  		return true
   335  	case OpMin32F:
   336  		v.Op = OpRISCV64LoweredFMINS
   337  		return true
   338  	case OpMin64F:
   339  		v.Op = OpRISCV64LoweredFMIND
   340  		return true
   341  	case OpMod16:
   342  		return rewriteValueRISCV64_OpMod16(v)
   343  	case OpMod16u:
   344  		return rewriteValueRISCV64_OpMod16u(v)
   345  	case OpMod32:
   346  		return rewriteValueRISCV64_OpMod32(v)
   347  	case OpMod32u:
   348  		v.Op = OpRISCV64REMUW
   349  		return true
   350  	case OpMod64:
   351  		return rewriteValueRISCV64_OpMod64(v)
   352  	case OpMod64u:
   353  		v.Op = OpRISCV64REMU
   354  		return true
   355  	case OpMod8:
   356  		return rewriteValueRISCV64_OpMod8(v)
   357  	case OpMod8u:
   358  		return rewriteValueRISCV64_OpMod8u(v)
   359  	case OpMove:
   360  		return rewriteValueRISCV64_OpMove(v)
   361  	case OpMul16:
   362  		return rewriteValueRISCV64_OpMul16(v)
   363  	case OpMul32:
   364  		v.Op = OpRISCV64MULW
   365  		return true
   366  	case OpMul32F:
   367  		v.Op = OpRISCV64FMULS
   368  		return true
   369  	case OpMul64:
   370  		v.Op = OpRISCV64MUL
   371  		return true
   372  	case OpMul64F:
   373  		v.Op = OpRISCV64FMULD
   374  		return true
   375  	case OpMul64uhilo:
   376  		v.Op = OpRISCV64LoweredMuluhilo
   377  		return true
   378  	case OpMul64uover:
   379  		v.Op = OpRISCV64LoweredMuluover
   380  		return true
   381  	case OpMul8:
   382  		return rewriteValueRISCV64_OpMul8(v)
   383  	case OpNeg16:
   384  		v.Op = OpRISCV64NEG
   385  		return true
   386  	case OpNeg32:
   387  		v.Op = OpRISCV64NEG
   388  		return true
   389  	case OpNeg32F:
   390  		v.Op = OpRISCV64FNEGS
   391  		return true
   392  	case OpNeg64:
   393  		v.Op = OpRISCV64NEG
   394  		return true
   395  	case OpNeg64F:
   396  		v.Op = OpRISCV64FNEGD
   397  		return true
   398  	case OpNeg8:
   399  		v.Op = OpRISCV64NEG
   400  		return true
   401  	case OpNeq16:
   402  		return rewriteValueRISCV64_OpNeq16(v)
   403  	case OpNeq32:
   404  		return rewriteValueRISCV64_OpNeq32(v)
   405  	case OpNeq32F:
   406  		v.Op = OpRISCV64FNES
   407  		return true
   408  	case OpNeq64:
   409  		return rewriteValueRISCV64_OpNeq64(v)
   410  	case OpNeq64F:
   411  		v.Op = OpRISCV64FNED
   412  		return true
   413  	case OpNeq8:
   414  		return rewriteValueRISCV64_OpNeq8(v)
   415  	case OpNeqB:
   416  		return rewriteValueRISCV64_OpNeqB(v)
   417  	case OpNeqPtr:
   418  		return rewriteValueRISCV64_OpNeqPtr(v)
   419  	case OpNilCheck:
   420  		v.Op = OpRISCV64LoweredNilCheck
   421  		return true
   422  	case OpNot:
   423  		v.Op = OpRISCV64SEQZ
   424  		return true
   425  	case OpOffPtr:
   426  		return rewriteValueRISCV64_OpOffPtr(v)
   427  	case OpOr16:
   428  		v.Op = OpRISCV64OR
   429  		return true
   430  	case OpOr32:
   431  		v.Op = OpRISCV64OR
   432  		return true
   433  	case OpOr64:
   434  		v.Op = OpRISCV64OR
   435  		return true
   436  	case OpOr8:
   437  		v.Op = OpRISCV64OR
   438  		return true
   439  	case OpOrB:
   440  		v.Op = OpRISCV64OR
   441  		return true
   442  	case OpPanicBounds:
   443  		return rewriteValueRISCV64_OpPanicBounds(v)
   444  	case OpPubBarrier:
   445  		v.Op = OpRISCV64LoweredPubBarrier
   446  		return true
   447  	case OpRISCV64ADD:
   448  		return rewriteValueRISCV64_OpRISCV64ADD(v)
   449  	case OpRISCV64ADDI:
   450  		return rewriteValueRISCV64_OpRISCV64ADDI(v)
   451  	case OpRISCV64AND:
   452  		return rewriteValueRISCV64_OpRISCV64AND(v)
   453  	case OpRISCV64ANDI:
   454  		return rewriteValueRISCV64_OpRISCV64ANDI(v)
   455  	case OpRISCV64FADDD:
   456  		return rewriteValueRISCV64_OpRISCV64FADDD(v)
   457  	case OpRISCV64FADDS:
   458  		return rewriteValueRISCV64_OpRISCV64FADDS(v)
   459  	case OpRISCV64FMADDD:
   460  		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
   461  	case OpRISCV64FMADDS:
   462  		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
   463  	case OpRISCV64FMSUBD:
   464  		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
   465  	case OpRISCV64FMSUBS:
   466  		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
   467  	case OpRISCV64FNMADDD:
   468  		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
   469  	case OpRISCV64FNMADDS:
   470  		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
   471  	case OpRISCV64FNMSUBD:
   472  		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
   473  	case OpRISCV64FNMSUBS:
   474  		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
   475  	case OpRISCV64FSUBD:
   476  		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
   477  	case OpRISCV64FSUBS:
   478  		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
   479  	case OpRISCV64MOVBUload:
   480  		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
   481  	case OpRISCV64MOVBUreg:
   482  		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
   483  	case OpRISCV64MOVBload:
   484  		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
   485  	case OpRISCV64MOVBreg:
   486  		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
   487  	case OpRISCV64MOVBstore:
   488  		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
   489  	case OpRISCV64MOVBstorezero:
   490  		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
   491  	case OpRISCV64MOVDload:
   492  		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
   493  	case OpRISCV64MOVDnop:
   494  		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
   495  	case OpRISCV64MOVDreg:
   496  		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
   497  	case OpRISCV64MOVDstore:
   498  		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
   499  	case OpRISCV64MOVDstorezero:
   500  		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
   501  	case OpRISCV64MOVHUload:
   502  		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
   503  	case OpRISCV64MOVHUreg:
   504  		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
   505  	case OpRISCV64MOVHload:
   506  		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
   507  	case OpRISCV64MOVHreg:
   508  		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
   509  	case OpRISCV64MOVHstore:
   510  		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
   511  	case OpRISCV64MOVHstorezero:
   512  		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
   513  	case OpRISCV64MOVWUload:
   514  		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
   515  	case OpRISCV64MOVWUreg:
   516  		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
   517  	case OpRISCV64MOVWload:
   518  		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
   519  	case OpRISCV64MOVWreg:
   520  		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
   521  	case OpRISCV64MOVWstore:
   522  		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
   523  	case OpRISCV64MOVWstorezero:
   524  		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
   525  	case OpRISCV64NEG:
   526  		return rewriteValueRISCV64_OpRISCV64NEG(v)
   527  	case OpRISCV64NEGW:
   528  		return rewriteValueRISCV64_OpRISCV64NEGW(v)
   529  	case OpRISCV64OR:
   530  		return rewriteValueRISCV64_OpRISCV64OR(v)
   531  	case OpRISCV64ORI:
   532  		return rewriteValueRISCV64_OpRISCV64ORI(v)
   533  	case OpRISCV64ROL:
   534  		return rewriteValueRISCV64_OpRISCV64ROL(v)
   535  	case OpRISCV64ROLW:
   536  		return rewriteValueRISCV64_OpRISCV64ROLW(v)
   537  	case OpRISCV64ROR:
   538  		return rewriteValueRISCV64_OpRISCV64ROR(v)
   539  	case OpRISCV64RORW:
   540  		return rewriteValueRISCV64_OpRISCV64RORW(v)
   541  	case OpRISCV64SEQZ:
   542  		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
   543  	case OpRISCV64SLL:
   544  		return rewriteValueRISCV64_OpRISCV64SLL(v)
   545  	case OpRISCV64SLLI:
   546  		return rewriteValueRISCV64_OpRISCV64SLLI(v)
   547  	case OpRISCV64SLLW:
   548  		return rewriteValueRISCV64_OpRISCV64SLLW(v)
   549  	case OpRISCV64SLT:
   550  		return rewriteValueRISCV64_OpRISCV64SLT(v)
   551  	case OpRISCV64SLTI:
   552  		return rewriteValueRISCV64_OpRISCV64SLTI(v)
   553  	case OpRISCV64SLTIU:
   554  		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
   555  	case OpRISCV64SLTU:
   556  		return rewriteValueRISCV64_OpRISCV64SLTU(v)
   557  	case OpRISCV64SNEZ:
   558  		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
   559  	case OpRISCV64SRA:
   560  		return rewriteValueRISCV64_OpRISCV64SRA(v)
   561  	case OpRISCV64SRAI:
   562  		return rewriteValueRISCV64_OpRISCV64SRAI(v)
   563  	case OpRISCV64SRAW:
   564  		return rewriteValueRISCV64_OpRISCV64SRAW(v)
   565  	case OpRISCV64SRL:
   566  		return rewriteValueRISCV64_OpRISCV64SRL(v)
   567  	case OpRISCV64SRLI:
   568  		return rewriteValueRISCV64_OpRISCV64SRLI(v)
   569  	case OpRISCV64SRLW:
   570  		return rewriteValueRISCV64_OpRISCV64SRLW(v)
   571  	case OpRISCV64SUB:
   572  		return rewriteValueRISCV64_OpRISCV64SUB(v)
   573  	case OpRISCV64SUBW:
   574  		return rewriteValueRISCV64_OpRISCV64SUBW(v)
   575  	case OpRISCV64XOR:
   576  		return rewriteValueRISCV64_OpRISCV64XOR(v)
   577  	case OpRotateLeft16:
   578  		return rewriteValueRISCV64_OpRotateLeft16(v)
   579  	case OpRotateLeft32:
   580  		v.Op = OpRISCV64ROLW
   581  		return true
   582  	case OpRotateLeft64:
   583  		v.Op = OpRISCV64ROL
   584  		return true
   585  	case OpRotateLeft8:
   586  		return rewriteValueRISCV64_OpRotateLeft8(v)
   587  	case OpRound32F:
   588  		v.Op = OpRISCV64LoweredRound32F
   589  		return true
   590  	case OpRound64F:
   591  		v.Op = OpRISCV64LoweredRound64F
   592  		return true
   593  	case OpRsh16Ux16:
   594  		return rewriteValueRISCV64_OpRsh16Ux16(v)
   595  	case OpRsh16Ux32:
   596  		return rewriteValueRISCV64_OpRsh16Ux32(v)
   597  	case OpRsh16Ux64:
   598  		return rewriteValueRISCV64_OpRsh16Ux64(v)
   599  	case OpRsh16Ux8:
   600  		return rewriteValueRISCV64_OpRsh16Ux8(v)
   601  	case OpRsh16x16:
   602  		return rewriteValueRISCV64_OpRsh16x16(v)
   603  	case OpRsh16x32:
   604  		return rewriteValueRISCV64_OpRsh16x32(v)
   605  	case OpRsh16x64:
   606  		return rewriteValueRISCV64_OpRsh16x64(v)
   607  	case OpRsh16x8:
   608  		return rewriteValueRISCV64_OpRsh16x8(v)
   609  	case OpRsh32Ux16:
   610  		return rewriteValueRISCV64_OpRsh32Ux16(v)
   611  	case OpRsh32Ux32:
   612  		return rewriteValueRISCV64_OpRsh32Ux32(v)
   613  	case OpRsh32Ux64:
   614  		return rewriteValueRISCV64_OpRsh32Ux64(v)
   615  	case OpRsh32Ux8:
   616  		return rewriteValueRISCV64_OpRsh32Ux8(v)
   617  	case OpRsh32x16:
   618  		return rewriteValueRISCV64_OpRsh32x16(v)
   619  	case OpRsh32x32:
   620  		return rewriteValueRISCV64_OpRsh32x32(v)
   621  	case OpRsh32x64:
   622  		return rewriteValueRISCV64_OpRsh32x64(v)
   623  	case OpRsh32x8:
   624  		return rewriteValueRISCV64_OpRsh32x8(v)
   625  	case OpRsh64Ux16:
   626  		return rewriteValueRISCV64_OpRsh64Ux16(v)
   627  	case OpRsh64Ux32:
   628  		return rewriteValueRISCV64_OpRsh64Ux32(v)
   629  	case OpRsh64Ux64:
   630  		return rewriteValueRISCV64_OpRsh64Ux64(v)
   631  	case OpRsh64Ux8:
   632  		return rewriteValueRISCV64_OpRsh64Ux8(v)
   633  	case OpRsh64x16:
   634  		return rewriteValueRISCV64_OpRsh64x16(v)
   635  	case OpRsh64x32:
   636  		return rewriteValueRISCV64_OpRsh64x32(v)
   637  	case OpRsh64x64:
   638  		return rewriteValueRISCV64_OpRsh64x64(v)
   639  	case OpRsh64x8:
   640  		return rewriteValueRISCV64_OpRsh64x8(v)
   641  	case OpRsh8Ux16:
   642  		return rewriteValueRISCV64_OpRsh8Ux16(v)
   643  	case OpRsh8Ux32:
   644  		return rewriteValueRISCV64_OpRsh8Ux32(v)
   645  	case OpRsh8Ux64:
   646  		return rewriteValueRISCV64_OpRsh8Ux64(v)
   647  	case OpRsh8Ux8:
   648  		return rewriteValueRISCV64_OpRsh8Ux8(v)
   649  	case OpRsh8x16:
   650  		return rewriteValueRISCV64_OpRsh8x16(v)
   651  	case OpRsh8x32:
   652  		return rewriteValueRISCV64_OpRsh8x32(v)
   653  	case OpRsh8x64:
   654  		return rewriteValueRISCV64_OpRsh8x64(v)
   655  	case OpRsh8x8:
   656  		return rewriteValueRISCV64_OpRsh8x8(v)
   657  	case OpSelect0:
   658  		return rewriteValueRISCV64_OpSelect0(v)
   659  	case OpSelect1:
   660  		return rewriteValueRISCV64_OpSelect1(v)
   661  	case OpSignExt16to32:
   662  		v.Op = OpRISCV64MOVHreg
   663  		return true
   664  	case OpSignExt16to64:
   665  		v.Op = OpRISCV64MOVHreg
   666  		return true
   667  	case OpSignExt32to64:
   668  		v.Op = OpRISCV64MOVWreg
   669  		return true
   670  	case OpSignExt8to16:
   671  		v.Op = OpRISCV64MOVBreg
   672  		return true
   673  	case OpSignExt8to32:
   674  		v.Op = OpRISCV64MOVBreg
   675  		return true
   676  	case OpSignExt8to64:
   677  		v.Op = OpRISCV64MOVBreg
   678  		return true
   679  	case OpSlicemask:
   680  		return rewriteValueRISCV64_OpSlicemask(v)
   681  	case OpSqrt:
   682  		v.Op = OpRISCV64FSQRTD
   683  		return true
   684  	case OpSqrt32:
   685  		v.Op = OpRISCV64FSQRTS
   686  		return true
   687  	case OpStaticCall:
   688  		v.Op = OpRISCV64CALLstatic
   689  		return true
   690  	case OpStore:
   691  		return rewriteValueRISCV64_OpStore(v)
   692  	case OpSub16:
   693  		v.Op = OpRISCV64SUB
   694  		return true
   695  	case OpSub32:
   696  		v.Op = OpRISCV64SUB
   697  		return true
   698  	case OpSub32F:
   699  		v.Op = OpRISCV64FSUBS
   700  		return true
   701  	case OpSub64:
   702  		v.Op = OpRISCV64SUB
   703  		return true
   704  	case OpSub64F:
   705  		v.Op = OpRISCV64FSUBD
   706  		return true
   707  	case OpSub8:
   708  		v.Op = OpRISCV64SUB
   709  		return true
   710  	case OpSubPtr:
   711  		v.Op = OpRISCV64SUB
   712  		return true
   713  	case OpTailCall:
   714  		v.Op = OpRISCV64CALLtail
   715  		return true
   716  	case OpTrunc16to8:
   717  		v.Op = OpCopy
   718  		return true
   719  	case OpTrunc32to16:
   720  		v.Op = OpCopy
   721  		return true
   722  	case OpTrunc32to8:
   723  		v.Op = OpCopy
   724  		return true
   725  	case OpTrunc64to16:
   726  		v.Op = OpCopy
   727  		return true
   728  	case OpTrunc64to32:
   729  		v.Op = OpCopy
   730  		return true
   731  	case OpTrunc64to8:
   732  		v.Op = OpCopy
   733  		return true
   734  	case OpWB:
   735  		v.Op = OpRISCV64LoweredWB
   736  		return true
   737  	case OpXor16:
   738  		v.Op = OpRISCV64XOR
   739  		return true
   740  	case OpXor32:
   741  		v.Op = OpRISCV64XOR
   742  		return true
   743  	case OpXor64:
   744  		v.Op = OpRISCV64XOR
   745  		return true
   746  	case OpXor8:
   747  		v.Op = OpRISCV64XOR
   748  		return true
   749  	case OpZero:
   750  		return rewriteValueRISCV64_OpZero(v)
   751  	case OpZeroExt16to32:
   752  		v.Op = OpRISCV64MOVHUreg
   753  		return true
   754  	case OpZeroExt16to64:
   755  		v.Op = OpRISCV64MOVHUreg
   756  		return true
   757  	case OpZeroExt32to64:
   758  		v.Op = OpRISCV64MOVWUreg
   759  		return true
   760  	case OpZeroExt8to16:
   761  		v.Op = OpRISCV64MOVBUreg
   762  		return true
   763  	case OpZeroExt8to32:
   764  		v.Op = OpRISCV64MOVBUreg
   765  		return true
   766  	case OpZeroExt8to64:
   767  		v.Op = OpRISCV64MOVBUreg
   768  		return true
   769  	}
   770  	return false
   771  }
   772  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   773  	v_0 := v.Args[0]
   774  	// match: (Addr {sym} base)
   775  	// result: (MOVaddr {sym} [0] base)
   776  	for {
   777  		sym := auxToSym(v.Aux)
   778  		base := v_0
   779  		v.reset(OpRISCV64MOVaddr)
   780  		v.AuxInt = int32ToAuxInt(0)
   781  		v.Aux = symToAux(sym)
   782  		v.AddArg(base)
   783  		return true
   784  	}
   785  }
   786  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   787  	v_2 := v.Args[2]
   788  	v_1 := v.Args[1]
   789  	v_0 := v.Args[0]
   790  	b := v.Block
   791  	typ := &b.Func.Config.Types
   792  	// match: (AtomicAnd8 ptr val mem)
   793  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   794  	for {
   795  		ptr := v_0
   796  		val := v_1
   797  		mem := v_2
   798  		v.reset(OpRISCV64LoweredAtomicAnd32)
   799  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   800  		v0.AuxInt = int64ToAuxInt(^3)
   801  		v0.AddArg(ptr)
   802  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   803  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   804  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   805  		v3.AuxInt = int64ToAuxInt(0xff)
   806  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   807  		v4.AddArg(val)
   808  		v3.AddArg(v4)
   809  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   810  		v5.AuxInt = int64ToAuxInt(3)
   811  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   812  		v6.AuxInt = int64ToAuxInt(3)
   813  		v6.AddArg(ptr)
   814  		v5.AddArg(v6)
   815  		v2.AddArg2(v3, v5)
   816  		v1.AddArg(v2)
   817  		v.AddArg3(v0, v1, mem)
   818  		return true
   819  	}
   820  }
   821  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   822  	v_3 := v.Args[3]
   823  	v_2 := v.Args[2]
   824  	v_1 := v.Args[1]
   825  	v_0 := v.Args[0]
   826  	b := v.Block
   827  	typ := &b.Func.Config.Types
   828  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   829  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   830  	for {
   831  		ptr := v_0
   832  		old := v_1
   833  		new := v_2
   834  		mem := v_3
   835  		v.reset(OpRISCV64LoweredAtomicCas32)
   836  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   837  		v0.AddArg(old)
   838  		v.AddArg4(ptr, v0, new, mem)
   839  		return true
   840  	}
   841  }
   842  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   843  	v_2 := v.Args[2]
   844  	v_1 := v.Args[1]
   845  	v_0 := v.Args[0]
   846  	b := v.Block
   847  	typ := &b.Func.Config.Types
   848  	// match: (AtomicOr8 ptr val mem)
   849  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   850  	for {
   851  		ptr := v_0
   852  		val := v_1
   853  		mem := v_2
   854  		v.reset(OpRISCV64LoweredAtomicOr32)
   855  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   856  		v0.AuxInt = int64ToAuxInt(^3)
   857  		v0.AddArg(ptr)
   858  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   859  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   860  		v2.AddArg(val)
   861  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   862  		v3.AuxInt = int64ToAuxInt(3)
   863  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   864  		v4.AuxInt = int64ToAuxInt(3)
   865  		v4.AddArg(ptr)
   866  		v3.AddArg(v4)
   867  		v1.AddArg2(v2, v3)
   868  		v.AddArg3(v0, v1, mem)
   869  		return true
   870  	}
   871  }
   872  func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
   873  	v_1 := v.Args[1]
   874  	v_0 := v.Args[0]
   875  	b := v.Block
   876  	// match: (Avg64u <t> x y)
   877  	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
   878  	for {
   879  		t := v.Type
   880  		x := v_0
   881  		y := v_1
   882  		v.reset(OpRISCV64ADD)
   883  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
   884  		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   885  		v1.AuxInt = int64ToAuxInt(1)
   886  		v1.AddArg(x)
   887  		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   888  		v2.AuxInt = int64ToAuxInt(1)
   889  		v2.AddArg(y)
   890  		v0.AddArg2(v1, v2)
   891  		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
   892  		v3.AuxInt = int64ToAuxInt(1)
   893  		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
   894  		v4.AddArg2(x, y)
   895  		v3.AddArg(v4)
   896  		v.AddArg2(v0, v3)
   897  		return true
   898  	}
   899  }
   900  func rewriteValueRISCV64_OpConst16(v *Value) bool {
   901  	// match: (Const16 [val])
   902  	// result: (MOVDconst [int64(val)])
   903  	for {
   904  		val := auxIntToInt16(v.AuxInt)
   905  		v.reset(OpRISCV64MOVDconst)
   906  		v.AuxInt = int64ToAuxInt(int64(val))
   907  		return true
   908  	}
   909  }
   910  func rewriteValueRISCV64_OpConst32(v *Value) bool {
   911  	// match: (Const32 [val])
   912  	// result: (MOVDconst [int64(val)])
   913  	for {
   914  		val := auxIntToInt32(v.AuxInt)
   915  		v.reset(OpRISCV64MOVDconst)
   916  		v.AuxInt = int64ToAuxInt(int64(val))
   917  		return true
   918  	}
   919  }
   920  func rewriteValueRISCV64_OpConst32F(v *Value) bool {
   921  	b := v.Block
   922  	typ := &b.Func.Config.Types
   923  	// match: (Const32F [val])
   924  	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
   925  	for {
   926  		val := auxIntToFloat32(v.AuxInt)
   927  		v.reset(OpRISCV64FMVSX)
   928  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   929  		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
   930  		v.AddArg(v0)
   931  		return true
   932  	}
   933  }
   934  func rewriteValueRISCV64_OpConst64(v *Value) bool {
   935  	// match: (Const64 [val])
   936  	// result: (MOVDconst [int64(val)])
   937  	for {
   938  		val := auxIntToInt64(v.AuxInt)
   939  		v.reset(OpRISCV64MOVDconst)
   940  		v.AuxInt = int64ToAuxInt(int64(val))
   941  		return true
   942  	}
   943  }
// rewriteValueRISCV64_OpConst64F lowers (Const64F [val]) to
// (FMVDX (MOVDconst [bits])): the float64 bit pattern is materialized as an
// integer constant and then moved into a float register. Always rewrites.
func rewriteValueRISCV64_OpConst64F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const64F [val])
	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpRISCV64FMVDX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst8 lowers (Const8 [val]) to (MOVDconst [int64(val)]),
// widening the 8-bit aux constant to a 64-bit constant. Always rewrites.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConstBool lowers (ConstBool [val]) to a MOVDconst of
// 0 or 1 via b2i. Always rewrites.
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil lowers (ConstNil) to (MOVDconst [0]);
// nil pointers are just the zero constant. Always rewrites.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers (Div16 x y) to a 32-bit DIVW on
// sign-extended operands. The rewrite only fires when the boolean AuxInt is
// false (the generic Div16 aux flag); otherwise no rewrite is made.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers (Div16u x y) to a 32-bit unsigned DIVUW
// on zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers (Div32 x y) to (DIVW x y) when the
// boolean AuxInt is false; otherwise no rewrite is made.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers (Div64 x y) to (DIV x y) when the
// boolean AuxInt is false; otherwise no rewrite is made.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers (Div8 x y) to a 32-bit DIVW on
// sign-extended operands. Always rewrites.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers (Div8u x y) to a 32-bit unsigned DIVUW
// on zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers (Eq16 x y) to (SEQZ (SUB ...)) on
// zero-extended operands: the subtraction is zero exactly when the two
// 16-bit values are equal. Always rewrites.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers (Eq32 x y) to (SEQZ (SUB ...)). The
// operands are sign-extended when x's type is signed and zero-extended
// otherwise; either extension preserves 32-bit equality. The inner _i0 loop
// tries both argument orders because Eq32 is commutative.
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpEq64 lowers (Eq64 x y) to (SEQZ (SUB x y)):
// full-width operands need no extension. Always rewrites.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers (Eq8 x y) to (SEQZ (SUB ...)) on
// zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers (EqB x y) to (SEQZ (SUB <bool> x y));
// booleans are 0/1 so no extension is needed. Always rewrites.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers (EqPtr x y) to (SEQZ (SUB <uintptr> x y)).
// Always rewrites.
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers (Hmul32 x y) — the high 32 bits of a
// signed 32x32 multiply — as a full 64-bit MUL of sign-extended operands
// followed by an arithmetic right shift of 32. Always rewrites.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers (Hmul32u x y) — the high 32 bits of an
// unsigned 32x32 multiply — as a full 64-bit MUL of zero-extended operands
// followed by a logical right shift of 32. Always rewrites.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 lowers (Leq16 x y) to (Not (Less16 y x)):
// x <= y is the negation of y < x. Always rewrites.
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U lowers (Leq16U x y) to (Not (Less16U y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 lowers (Leq32 x y) to (Not (Less32 y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U lowers (Leq32U x y) to (Not (Less32U y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 lowers (Leq64 x y) to (Not (Less64 y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U lowers (Leq64U x y) to (Not (Less64U y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 lowers (Leq8 x y) to (Not (Less8 y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U lowers (Leq8U x y) to (Not (Less8U y x)).
// Always rewrites.
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers (Less16 x y) to a signed SLT on
// sign-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers (Less16U x y) to an unsigned SLTU on
// zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers (Less32 x y) to a signed SLT on
// sign-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers (Less32U x y) to an unsigned SLTU on
// zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers (Less8 x y) to a signed SLT on
// sign-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers (Less8U x y) to an unsigned SLTU on
// zero-extended operands. Always rewrites.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad lowers a generic (Load <t> ptr mem) to the
// machine load matching t: booleans and unsigned ints pick the
// zero-extending MOV*Uload variants, signed ints the sign-extending ones,
// pointers and 64-bit ints MOVDload, and floats FMOV{W,D}load. Returns false
// for any type not covered by these cases.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers (LocalAddr {sym} base mem) to
// (MOVaddr {sym} ...). When the addressed type contains pointers, the base is
// wrapped in SPanchored so the memory argument is retained; otherwise the
// memory argument is dropped. Returns false only if neither condition holds.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x16 lowers (Lsh16x16 x y). When the shift amount
// is not provably bounded, the SLL result is ANDed with a mask derived from
// (SLTIU [64] shift-amount): negating the 0/1 SLTIU result yields all-ones
// when the shift is < 64 and zero otherwise, forcing over-wide shifts to 0.
// When shiftIsBounded reports the amount is in range, a bare SLL suffices.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x32 lowers (Lsh16x32 x y): an SLL masked via
// Neg16(SLTIU [64] ...) when the 32-bit shift amount is not provably bounded,
// or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x64 lowers (Lsh16x64 x y): an SLL masked via
// Neg16(SLTIU [64] y) when the shift amount is not provably bounded (no
// extension needed — y is already 64-bit), or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers (Lsh16x8 x y): an SLL masked via
// Neg16(SLTIU [64] ...) when the 8-bit shift amount is not provably bounded,
// or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers (Lsh32x16 x y): an SLL masked via
// Neg32(SLTIU [64] ...) when the 16-bit shift amount is not provably bounded,
// or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers (Lsh32x32 x y): an SLL masked via
// Neg32(SLTIU [64] ...) when the 32-bit shift amount is not provably bounded,
// or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers (Lsh32x64 x y): an SLL masked via
// Neg32(SLTIU [64] y) when the shift amount is not provably bounded (no
// extension needed — y is already 64-bit), or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers (Lsh32x8 x y): an SLL masked via
// Neg32(SLTIU [64] ...) when the 8-bit shift amount is not provably bounded,
// or a bare SLL when it is.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers Lsh64x16. The 16-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers Lsh64x32. The 32-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers Lsh64x64. The 64-bit shift amount is
// used as-is in the SLTIU [64] range test whose negation masks the SLL result
// to zero on oversized shifts; bounded shifts lower to a bare SLL.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers Lsh64x8. The 8-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers Lsh8x16. The 16-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers Lsh8x32. The 32-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers Lsh8x64. The 64-bit shift amount is
// used as-is in the SLTIU [64] range test whose negation masks the SLL result
// to zero on oversized shifts; bounded shifts lower to a bare SLL.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers Lsh8x8. The 8-bit shift amount is
// zero-extended to 64 bits for the SLTIU [64] range test whose negation masks
// the SLL result to zero on oversized shifts; bounded shifts use SLL directly.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers Mod16 (only when its aux bool is false)
// by sign-extending both 16-bit operands to 32 bits and using REMW.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers Mod16u by zero-extending both 16-bit
// operands to 32 bits and using REMUW. Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers Mod32 (only when its aux bool is false)
// directly to the 32-bit REMW instruction.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers Mod64 (only when its aux bool is false)
// directly to the 64-bit REM instruction.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers Mod8 by sign-extending both 8-bit
// operands to 32 bits and using REMW. Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers Mod8u by zero-extending both 8-bit
// operands to 32 bits and using REMUW. Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMove lowers the generic Move (memmove of AuxInt
// bytes). Rules are tried in order: size 0 is a no-op; small fixed sizes
// (1, 2, 3, 4, 6, 8, 12, 16, 24, 32) become chains of MOVB/MOVH/MOVW/MOVD
// load+store pairs, with wider accesses used only when the type's alignment
// permits; aligned multiples of 8 up to 1024 bytes use DUFFCOPY (when Duff's
// device is enabled); everything else falls back to the LoweredMove loop,
// whose third argument is a pointer to the last element of src.
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [32] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(24)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(16)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(8)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AuxInt = int32ToAuxInt(8)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: (s <= 16 || logLargeCopy(v, s))
	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s <= 16 || logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMul16 lowers Mul16 by sign-extending both 16-bit
// operands to 32 bits and using MULW. Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMul8 lowers Mul8 by sign-extending both 8-bit
// operands to 32 bits and using MULW. Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpNeq16 lowers Neq16 as the negation of the generic
// Eq16 op: (Not (Eq16 x y)). Always rewrites (unconditional rule).
// NOTE: generated code — edit _gen/RISCV64.rules, not this file.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers Neq32 as the negation of equality:
// (Neq32 x y) => (Not (Eq32 x y)). Always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers Neq64 as the negation of equality:
// (Neq64 x y) => (Not (Eq64 x y)). Always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers Neq8 as the negation of equality:
// (Neq8 x y) => (Not (Eq8 x y)). Always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers boolean inequality: subtract the two
// booleans and test the difference for non-zero with SNEZ. Always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers pointer inequality as the negation of
// pointer equality: (NeqPtr x y) => (Not (EqPtr x y)). Always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers OffPtr (pointer + constant offset).
// Rules are tried in order: SP-relative offsets that fit in 32 bits become
// MOVaddr; other 32-bit offsets become ADDI; anything larger materializes the
// offset into a register (MOVDconst) and adds it. The final rule is
// unconditional, so this function always rewrites.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPanicBounds lowers PanicBounds to one of three
// register-ABI-specific panic helpers (A/B/C) chosen by boundsABI(kind).
// Returns false only if no ABI variant matches.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADD folds a 32-bit constant operand of ADD
// into the immediate form ADDI. The inner _i0 loop tries both operand orders
// since ADD is commutative. Pointer-typed constants are excluded (!t.IsPtr()).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies add-immediate:
//   - folds the immediate into a MOVaddr offset when the sum stays 32-bit,
//   - elides ADDI [0] entirely,
//   - constant-folds ADDI of MOVDconst,
//   - collapses nested ADDIs into one when the combined immediate is 32-bit.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND folds a 32-bit constant operand of AND into
// the immediate form ANDI; the _i0 loop tries both operand orders (AND is
// commutative).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI simplifies and-immediate:
//   - ANDI [0] is the constant 0,
//   - ANDI [-1] (all bits set) is the identity,
//   - ANDI of MOVDconst constant-folds,
//   - nested ANDIs collapse by intersecting their masks.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double add with a multiply into
// FMADDD (x*y + a) when the function permits FMA (useFMA). Both operand
// orders are tried via the _i0 loop since FADDD is commutative.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add with a
// multiply into FMADDS (x*y + a) when the function permits FMA (useFMA).
// Both operand orders are tried via the _i0 loop (FADDS is commutative).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD absorbs single-use FNEGD operands into
// the fused op: a negated multiplicand turns FMADDD into FNMSUBD, a negated
// addend turns it into FMSUBD. The neg.Uses == 1 condition ensures the
// negation has no other consumer and can be deleted.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS absorbs single-use FNEGS operands into
// the fused op: a negated multiplicand turns FMADDS into FNMSUBS, a negated
// addend turns it into FMSUBS (single-precision analogue of FMADDD rules).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD absorbs single-use FNEGD operands:
// a negated multiplicand turns FMSUBD into FNMADDD, a negated subtrahend
// turns it back into FMADDD.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS absorbs single-use FNEGS operands:
// a negated multiplicand turns FMSUBS into FNMADDS, a negated subtrahend
// turns it back into FMADDS (single-precision analogue of FMSUBD rules).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD absorbs single-use FNEGD operands:
// a negated multiplicand turns FNMADDD into FMSUBD, a negated addend turns
// it into FNMSUBD.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS absorbs single-use FNEGS operands:
// a negated multiplicand turns FNMADDS into FMSUBS, a negated addend turns
// it into FNMSUBS (single-precision analogue of FNMADDD rules).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD absorbs single-use FNEGD operands:
// a negated multiplicand turns FNMSUBD into FMADDD, a negated subtrahend
// turns it into FNMADDD.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS absorbs single-use FNEGS operands:
// a negated multiplicand turns FNMSUBS into FMADDS, a negated subtrahend
// turns it into FNMADDS (single-precision analogue of FNMSUBD rules).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD fuses a double subtract with a multiply
// when the function permits FMA: a - x*y becomes FNMSUBD, x*y - a becomes
// FMSUBD. Subtraction is not commutative, so the two operand orders are
// separate rules rather than a commuting loop.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS fuses a single-precision subtract with a
// multiply when the function permits FMA: a - x*y becomes FNMSUBS, x*y - a
// becomes FMSUBS. The two operand orders are distinct rules (no commuting).
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload folds addressing arithmetic into the
// unsigned byte load: a MOVaddr base merges its offset and symbol into the
// load, and an ADDI base folds its immediate into the load offset — in both
// cases only when the combined offset still fits in 32 bits.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUreg removes redundant zero-extensions of
// a byte. Values already known to fit in 8 unsigned bits — boolean-producing
// float comparisons (FLES/FLTS/FEQS/FNES and double variants), SEQZ/SNEZ,
// SLT/SLTU, and ANDI with a suitable non-negative mask — are used as-is
// (copyOf). ANDI with a negative mask has the mask truncated to 8 bits;
// MOVDconst is folded; already-zero-extended loads, atomic results, and
// nested MOVBUregs become MOVDreg. Finally, a single-use signed byte load
// under a zero-extension is replaced by an unsigned load in the load's block.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBUreg x:(FLES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SEQZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SEQZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SNEZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SNEZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint8(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBload folds addressing arithmetic into the
// signed byte load: a MOVaddr base merges its offset and symbol into the
// load, and an ADDI base folds its immediate into the load offset — in both
// cases only when the combined offset still fits in 32 bits.
// Generated from _gen/RISCV64.rules; change the rules file, not this function.
func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBreg simplifies sign-extending byte moves:
// it drops extensions that are provably no-ops (small non-negative ANDI masks,
// already sign-extended loads/regs), constant-folds MOVDconst operands, and
// converts a MOVBreg of a single-use MOVBUload into a MOVBload emitted in the
// load's block. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Replacement value lives in the load's block, not v's.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstore rewrites byte stores: it folds
// constant address arithmetic (MOVaddr, ADDI) into the store's offset,
// turns a store of constant zero into MOVBstorezero, and strips redundant
// sign/zero extensions (MOV[BHW]reg, MOV[BHW]Ureg) from the stored value,
// since only the low byte is written. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstorezero folds constant address
// arithmetic (MOVaddr, ADDI) on the address operand of a MOVBstorezero into
// its offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDload folds constant address arithmetic
// (MOVaddr, ADDI) on the address operand of a MOVDload into the load's
// offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDnop constant-folds a MOVDnop whose
// argument is a MOVDconst into the constant itself. It reports whether v
// was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDreg converts a MOVDreg of a single-use
// value into a MOVDnop of that value. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstore rewrites doubleword stores: it folds
// constant address arithmetic (MOVaddr, ADDI) into the store's offset, and
// turns a store of constant zero into MOVDstorezero. It reports whether v was
// rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstorezero folds constant address
// arithmetic (MOVaddr, ADDI) on the address operand of a MOVDstorezero into
// its offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUload folds constant address arithmetic
// (MOVaddr, ADDI) on the address operand of a MOVHUload into the load's
// offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUreg simplifies zero-extending halfword
// moves: it drops extensions that are provably no-ops (ANDI masks already
// within 16 bits, already zero-extended loads/regs), narrows negative ANDI
// masks to their low 16 bits, constant-folds MOVDconst operands, and converts
// a MOVHUreg of a single-use MOVHload into a MOVHUload emitted in the load's
// block. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint16(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Replacement value lives in the load's block, not v's.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHload folds constant address arithmetic
// (MOVaddr, ADDI) on the address operand of a MOVHload into the load's
// offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHreg simplifies sign-extending halfword
// moves: it drops extensions that are provably no-ops (small non-negative
// ANDI masks, already sign/zero-extended narrower loads and regs),
// constant-folds MOVDconst operands, and converts a MOVHreg of a single-use
// MOVHUload into a MOVHload emitted in the load's block. It reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Replacement value lives in the load's block, not v's.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore rewrites halfword stores: it folds
// constant address arithmetic (MOVaddr, ADDI) into the store's offset, turns
// a store of constant zero into MOVHstorezero, and strips redundant sign/zero
// extensions (MOV[HW]reg, MOV[HW]Ureg) from the stored value, since only the
// low halfword is written. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstorezero folds constant address
// arithmetic (MOVaddr, ADDI) on the address operand of a MOVHstorezero into
// its offset/symbol aux fields. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUload folds address arithmetic into the
// unsigned 32-bit load: a MOVaddr base (when symbols can merge) or an ADDI
// immediate is absorbed into the load's offset, provided the combined offset
// still fits in 32 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg simplifies 32-bit zero-extensions:
// it removes extensions made redundant by a small ANDI mask, folds constants,
// replaces extensions of values that are already zero-extended (unsigned
// loads and narrower zero-extends) with a plain MOVDreg, and converts a
// sign-extending MOVWload whose only use is this extension into a MOVWUload.
// It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (MOVWUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// Mask already clears bits 32-63, so the zero-extension is a no-op.
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (ANDI [c] x))
	// cond: c < 0
	// result: (AND (MOVDconst [int64(uint32(c))]) x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		// A negative ANDI immediate sign-extends; materialize the truncated
		// 32-bit mask as a constant and AND with it instead.
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(v0, x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		// clobber is a rewrite helper (see rewrite.go); presumably it marks x
		// dead and always reports true, so the real condition is x.Uses == 1.
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// "@x.Block" form: build the replacement load in the block of the
		// original load so it stays with the memory state it consumes.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload folds address arithmetic into the
// signed 32-bit load: a MOVaddr base (when symbols can merge) or an ADDI
// immediate is absorbed into the load's offset, provided the combined offset
// still fits in 32 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg simplifies 32-bit sign-extensions:
// it removes extensions made redundant by a small ANDI mask, folds constants,
// replaces extensions of values whose upper 32 bits are already correct
// (narrow loads, 32-bit W-form arithmetic results, and narrower extensions)
// with a plain MOVDreg, and converts a zero-extending MOVWUload whose only
// use is this extension into a MOVWload. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// Mask keeps bit 31 clear, so the result is already sign-extended.
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ADDIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ADDIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(SUBW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64SUBW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(NEGW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64NEGW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MULW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MULW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ROLW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ROLW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		// clobber is a rewrite helper (see rewrite.go); presumably it marks x
		// dead and always reports true, so the real condition is x.Uses == 1.
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// "@x.Block" form: build the replacement load in the block of the
		// original load so it stays with the memory state it consumes.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore simplifies 32-bit stores: it folds
// MOVaddr/ADDI address arithmetic into the store's offset (when the combined
// offset fits in 32 bits), turns a store of constant zero into MOVWstorezero,
// and drops a redundant sign/zero extension of the stored value (only the
// low 32 bits are written anyway). It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero folds address arithmetic into
// MOVWstorezero: a MOVaddr base (when its symbol can merge with the store's)
// or an ADDI immediate is absorbed into the store's offset, provided the
// combined offset still fits in 32 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEG simplifies 64-bit negation: -(x-y) becomes
// y-x (also through a single-use ADDI), double negation cancels, and negation
// of a constant is folded. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEG (SUB x y))
	// result: (SUB y x)
	for {
		if v_0.Op != OpRISCV64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpRISCV64SUB)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] (SUB <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64SUB {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		// -(x-y+val) == (y-x) + (-val); only when s has no other users and
		// the negated immediate still fits in 32 bits.
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [x]))
	// result: (MOVDconst [-x])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEGW folds 32-bit negation of a constant: the
// result is negated, truncated to 32 bits, then sign-extended, matching NEGW
// semantics. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGW (MOVDconst [x]))
	// result: (MOVDconst [int64(int32(-x))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64OR converts an OR with a 32-bit constant
// operand (on either side — OR is commutative) into the immediate form ORI.
// It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		// Two passes: v_0 and v_1 are swapped on the second iteration to try
		// the commuted form of the pattern.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORI simplifies OR-immediate: ORI 0 is the
// identity, ORI -1 yields the all-ones constant, OR of two constants is
// folded, and nested ORIs are combined into one. It reports whether v was
// rewritten.
func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORI [-1] x)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORI [x] (MOVDconst [y]))
	// result: (MOVDconst [x | y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x | y)
		return true
	}
	// match: (ORI [x] (ORI [y] z))
	// result: (ORI [x | y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ORI)
		v.AuxInt = int64ToAuxInt(x | y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROL simplifies 64-bit rotate-left: a constant
// rotate amount becomes a rotate-right-immediate by the complementary amount
// (only the low 6 bits matter), and rotating left by a negated amount is a
// rotate right. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROL x (MOVDconst [val]))
	// result: (RORI [int64(int8(-val)&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		// rotl(x, val) == rotr(x, -val mod 64); the int8 truncation keeps at
		// least the 6 bits that the &63 mask then selects.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
		v.AddArg(x)
		return true
	}
	// match: (ROL x (NEG y))
	// result: (ROR x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ROR)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROLW simplifies 32-bit rotate-left: a constant
// rotate amount becomes a rotate-right-immediate by the complementary amount
// (only the low 5 bits matter), and rotating left by a negated amount is a
// rotate right. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROLW x (MOVDconst [val]))
	// result: (RORIW [int64(int8(-val)&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		// rotlw(x, val) == rotrw(x, -val mod 32); the int8 truncation keeps
		// at least the 5 bits that the &31 mask then selects.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
		v.AddArg(x)
		return true
	}
	// match: (ROLW x (NEG y))
	// result: (RORW x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64RORW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROR converts a 64-bit rotate-right by a
// constant into the immediate form RORI, masking the amount to its low
// 6 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [val]))
	// result: (RORI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64RORW converts a 32-bit rotate-right by a
// constant into the immediate form RORIW, masking the amount to its low
// 5 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [val]))
	// result: (RORIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SEQZ simplifies set-if-equal-zero: negation of
// the operand doesn't change zero-ness, and stacked SEQZ/SNEZ collapse since
// their results are already 0 or 1. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SEQZ (NEG x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SEQZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SNEZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLL converts a 64-bit shift-left by a constant
// into the immediate form SLLI, masking the amount to its low 6 bits. It
// reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [val]))
	// result: (SLLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLI folds a shift-left-immediate of a
// constant into a constant, but only when the shifted value still fits in
// 32 bits (so it remains cheap to rematerialize). It reports whether v was
// rewritten.
func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLI [x] (MOVDconst [y]))
	// cond: is32Bit(y << uint32(x))
	// result: (MOVDconst [y << uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(y << uint32(x))) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(y << uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLW converts a 32-bit shift-left by a
// constant into the immediate form SLLIW, masking the amount to its low
// 5 bits. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLW x (MOVDconst [val]))
	// result: (SLLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLT simplifies signed set-less-than: a
// constant right operand in the 12-bit immediate range [-2048, 2047] becomes
// SLTI, and x < x is the constant 0. It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLT x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTI [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTI)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTI simplifies signed set-less-than-immediate:
// the comparison is folded when the operand is a constant, and bounded via
// ANDI (result is at most the non-negative mask) or ORI (result is at least
// the non-negative immediate). It reports whether v was rewritten.
func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTI [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(int64(y) < int64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
		return true
	}
	// match: (SLTI [x] (ANDI [y] _))
	// cond: y >= 0 && int64(y) < int64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		// A non-negative mask y bounds the ANDI result to [0, y], so if
		// y < x the comparison is always true.
		if !(y >= 0 && int64(y) < int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTI [x] (ORI [y] _))
	// cond: y >= 0 && int64(y) >= int64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		// A non-negative ORI immediate y guarantees the result is >= y, so
		// if y >= x the comparison is always false.
		if !(y >= 0 && int64(y) >= int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTIU simplifies unsigned
// set-less-than-immediate: the comparison is folded when the operand is a
// constant, and bounded via ANDI (result is at most the non-negative mask)
// or ORI (result is at least the non-negative immediate). It reports whether
// v was rewritten.
func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTIU [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
		return true
	}
	// match: (SLTIU [x] (ANDI [y] _))
	// cond: y >= 0 && uint64(y) < uint64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		// A non-negative mask y bounds the ANDI result to [0, y], so if
		// y < x (unsigned) the comparison is always true.
		if !(y >= 0 && uint64(y) < uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTIU [x] (ORI [y] _))
	// cond: y >= 0 && uint64(y) >= uint64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		// A non-negative ORI immediate y guarantees the result is >= y, so
		// if y >= x (unsigned) the comparison is always false.
		if !(y >= 0 && uint64(y) >= uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTU applies rewrite rules to an SLTU
// (register-register unsigned set-less-than) value: fold a small constant
// operand into the immediate form, and x < x is always false.
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLTU x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTIU [val] x)
	// The constant must fit the 12-bit signed I-type immediate range.
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTIU)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SNEZ applies rewrite rules to an SNEZ
// (set-if-not-zero) value, peeling operations that preserve zero-ness.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	// Negation preserves whether the value is zero.
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	// SEQZ already yields 0 or 1, so SNEZ of it is the identity.
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA folds a constant shift amount into the
// immediate form SRAI. The &63 mask matches the hardware's use of only the
// low 6 bits of a 64-bit shift amount.
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI applies rewrite rules to an SRAI
// (arithmetic right shift by immediate), simplifying shifts of
// sign/zero-extended operands and folding constant operands.
// Rules are tried in order; the first match wins.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW <t> [int64(x)] y)
	// SRAIW sign-extends from 32 bits itself, so the explicit MOVWreg
	// extension is redundant for in-range word shifts.
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	// Shifting a sign-extended byte right by >= 8 leaves only copies of
	// its sign bit; SLLI 56 then SRAI 63 smears that bit across the word.
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	// Same idea as the byte case, for a sign-extended halfword.
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	// Shifting a sign-extended word right by >= 32 leaves only its sign
	// bit, which SRAIW by 31 reproduces.
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW folds a constant shift amount into the
// immediate word form SRAIW; only the low 5 bits of a 32-bit shift amount
// are significant, hence &31.
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL folds a constant shift amount into the
// immediate form SRLI, masking to the low 6 bits as the hardware does.
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI applies rewrite rules to an SRLI
// (logical right shift by immediate). Shifting a zero-extended narrow
// value right by at least its width yields zero; constant operands fold.
// Rules are tried in order; the first match wins.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW <t> [int64(x)] y)
	// SRLIW operates on the low word directly, making the explicit
	// zero-extension redundant for in-range word shifts.
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRLIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRLI <t> [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst <t> [0])
	// A zero-extended byte shifted right by >= 8 bits is zero.
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW folds a constant shift amount into the
// immediate word form SRLIW, masking to the low 5 bits.
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB applies rewrite rules to a SUB value,
// turning constant subtrahends into immediate adds and eliminating
// subtractions by/from zero. Rules are tried in order; first match wins.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	// x - val == x + (-val); the negated constant must fit ADDI's range.
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	// val - y == -(y - val) == -(y + (-val)).
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW applies rewrite rules to a SUBW
// (32-bit subtract). SUBW by zero is not a pure identity: it still
// sign-extends the low word, hence the rewrite to ADDIW [0].
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR folds a constant operand into the
// immediate form XORI. The inner _i0 loop tries both operand orders,
// implementing the rule's commutativity.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers a 16-bit rotate-left to
// (x << (y&15)) | (zext16(x) >> ((-y)&15)): RISC-V (without Zbb here)
// has no narrow rotate instruction. Always matches, so it returns true
// unconditionally.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x y)
	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(15)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(15)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRotateLeft8 lowers an 8-bit rotate-left to
// (x << (y&7)) | (zext8(x) >> ((-y)&7)), the 8-bit analogue of the
// RotateLeft16 lowering above. Always matches.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x y)
	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned right shift of a
// 16-bit value by a 16-bit amount. When the shift may be out of range,
// the result is ANDed with Neg16(SLTIU [64] shamt): all-ones when the
// shift amount is < 64, zero otherwise (Go shifts >= width yield 0).
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned right shift of a
// 16-bit value by a 32-bit amount; same masking scheme as Rsh16Ux16,
// but the shift amount is zero-extended from 32 bits.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned right shift of a
// 16-bit value by a 64-bit amount; the shift amount needs no extension,
// otherwise identical to the other Rsh16Ux lowerings.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned right shift of a
// 16-bit value by an 8-bit amount; the shift amount is zero-extended
// from 8 bits, otherwise identical to the other Rsh16Ux lowerings.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers a signed right shift of a 16-bit
// value by a 16-bit amount. For unbounded shifts the amount is ORed with
// (SLTIU [64] shamt)-1: zero when shamt < 64, all-ones otherwise, which
// saturates the effective shift so out-of-range shifts smear the sign bit
// (Go semantics for signed shifts >= width).
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers a signed right shift of a 16-bit
// value by a 32-bit amount; same saturation scheme as Rsh16x16, with the
// shift amount zero-extended from 32 bits.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers a signed right shift of a 16-bit
// value by a 64-bit amount; the shift amount needs no extension, otherwise
// the same saturation scheme as the other Rsh16x lowerings.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers a signed right shift of a 16-bit
// value by an 8-bit amount; same saturation scheme as Rsh16x16, with the
// shift amount zero-extended from 8 bits.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers an unsigned right shift of a
// 32-bit value by a 16-bit amount using the word instruction SRLW, which
// needs no operand extension; out-of-range shifts are masked to zero via
// Neg32(SLTIU [32] shamt) (all-ones when shamt < 32, zero otherwise).
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers an unsigned right shift of a
// 32-bit value by a 32-bit amount; same SRLW + mask scheme as Rsh32Ux16,
// with the shift amount zero-extended from 32 bits for the range check.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers an unsigned right shift of a
// 32-bit value by a 64-bit amount; the shift amount needs no extension,
// otherwise the same SRLW + mask scheme as the other Rsh32Ux lowerings.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers an unsigned right shift of a
// 32-bit value by an 8-bit amount; same SRLW + mask scheme as Rsh32Ux16,
// with the shift amount zero-extended from 8 bits for the range check.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers Rsh32x16 (signed 32-bit right shift
// by a 16-bit amount) to RISCV64 ops. For a possibly out-of-range amount, the
// amount is ORed with (SLTIU [32] y') - 1, which is 0 when y < 32 and -1
// otherwise, saturating the shift so the result is the sign bits.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers Rsh32x32 (signed 32-bit right shift
// by a 32-bit amount) to RISCV64 ops, saturating an out-of-range shift amount
// to all ones via OR with (SLTIU [32] (ZeroExt32to64 y)) - 1.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers Rsh32x64 (signed 32-bit right shift
// by a 64-bit amount) to RISCV64 ops; no extension of y is needed since it is
// already 64 bits wide for the SLTIU range test.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers Rsh32x8 (signed 32-bit right shift by
// an 8-bit amount) to RISCV64 ops, zero-extending the amount for the SLTIU
// range test and saturating out-of-range shifts to all ones.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers Rsh64Ux16 (unsigned 64-bit right
// shift by a 16-bit amount) to RISCV64 ops, masking the SRL result to 0 when
// the zero-extended amount is >= 64.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers Rsh64Ux32 (unsigned 64-bit right
// shift by a 32-bit amount) to RISCV64 ops, masking the SRL result to 0 when
// the zero-extended amount is >= 64.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64 (unsigned 64-bit right
// shift by a 64-bit amount) to RISCV64 ops; the SRL result is ANDed with
// -1 when y < 64 and 0 otherwise, so oversized shifts yield 0.
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8 (unsigned 64-bit right shift
// by an 8-bit amount) to RISCV64 ops, masking the SRL result to 0 when the
// zero-extended amount is >= 64.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16 (signed 64-bit right shift
// by a 16-bit amount) to RISCV64 ops, saturating an out-of-range amount to
// all ones via OR with (SLTIU [64] (ZeroExt16to64 y)) - 1 so the result is
// the sign bits.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32 (signed 64-bit right shift
// by a 32-bit amount) to RISCV64 ops, saturating an out-of-range amount to
// all ones via OR with (SLTIU [64] (ZeroExt32to64 y)) - 1.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64 (signed 64-bit right shift
// by a 64-bit amount) to RISCV64 ops; no extension of y is needed since it is
// already 64 bits wide for the SLTIU range test.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (signed 64-bit right shift by
// an 8-bit amount) to RISCV64 ops, zero-extending the amount for the SLTIU
// range test and saturating out-of-range shifts to all ones.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (unsigned 8-bit right shift
// by a 16-bit amount) to RISCV64 ops. The value x is zero-extended to 64 bits
// before the SRL; the result is masked to 0 when the zero-extended amount is
// >= 64.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (unsigned 8-bit right shift
// by a 32-bit amount) to RISCV64 ops, zero-extending x for the SRL and
// masking the result to 0 when the zero-extended amount is >= 64.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (unsigned 8-bit right shift
// by a 64-bit amount) to RISCV64 ops, zero-extending x for the SRL; y needs
// no extension for the SLTIU range test.
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (unsigned 8-bit right shift by
// an 8-bit amount) to RISCV64 ops, zero-extending both x (for the SRL) and y
// (for the SLTIU range test) to 64 bits.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (signed 8-bit right shift by a
// 16-bit amount) to RISCV64 ops. The value x is sign-extended to 64 bits for
// the SRA; an out-of-range amount is saturated to all ones so the result is
// the sign bits.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (signed 8-bit right shift by a
// 32-bit amount) to RISCV64 ops, sign-extending x for the SRA and saturating
// an out-of-range amount to all ones.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (signed 8-bit right shift by a
// 64-bit amount) to RISCV64 ops, sign-extending x for the SRA; y needs no
// extension for the SLTIU range test.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (signed 8-bit right shift by an
// 8-bit amount) to RISCV64 ops, sign-extending x for the SRA, zero-extending
// y for the SLTIU range test, and saturating an out-of-range amount to all
// ones.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result of a
// multi-result op) to RISCV64 ops: the sum of Add64carry, the difference of
// Sub64borrow, or the high half of a single-use LoweredMuluhilo (MULHU).
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only split the pseudo-op when this Select0 is its sole use.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect1 lowers Select1 (the second result of a
// multi-result op) to RISCV64 ops. For Add64carry the carry-out is computed
// as (x+y < x) | (x+y+c < x+y); for Sub64borrow the borrow-out is
// (x < x-y) | (x-y < x-y-c). A single-use LoweredMuluhilo becomes MUL (the
// low half of the product).
func rewriteValueRISCV64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Add64carry x y c))
	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		// s is the intermediate sum x+y, shared by both overflow checks.
		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (Sub64borrow x y c))
	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		// s is the intermediate difference x-y, shared by both borrow checks.
		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MUL x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only split the pseudo-op when this Select1 is its sole use.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask to (SRAI [63] (NEG x)):
// arithmetic-shifting -x right by 63 broadcasts its sign bit, producing all
// ones when x is nonzero (positive lengths make -x negative) and 0 when x is
// zero. This rule always fires, so the function unconditionally returns true.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAI [63] (NEG <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
  8447  func rewriteValueRISCV64_OpStore(v *Value) bool {
  8448  	v_2 := v.Args[2]
  8449  	v_1 := v.Args[1]
  8450  	v_0 := v.Args[0]
  8451  	// match: (Store {t} ptr val mem)
  8452  	// cond: t.Size() == 1
  8453  	// result: (MOVBstore ptr val mem)
  8454  	for {
  8455  		t := auxToType(v.Aux)
  8456  		ptr := v_0
  8457  		val := v_1
  8458  		mem := v_2
  8459  		if !(t.Size() == 1) {
  8460  			break
  8461  		}
  8462  		v.reset(OpRISCV64MOVBstore)
  8463  		v.AddArg3(ptr, val, mem)
  8464  		return true
  8465  	}
  8466  	// match: (Store {t} ptr val mem)
  8467  	// cond: t.Size() == 2
  8468  	// result: (MOVHstore ptr val mem)
  8469  	for {
  8470  		t := auxToType(v.Aux)
  8471  		ptr := v_0
  8472  		val := v_1
  8473  		mem := v_2
  8474  		if !(t.Size() == 2) {
  8475  			break
  8476  		}
  8477  		v.reset(OpRISCV64MOVHstore)
  8478  		v.AddArg3(ptr, val, mem)
  8479  		return true
  8480  	}
  8481  	// match: (Store {t} ptr val mem)
  8482  	// cond: t.Size() == 4 && !t.IsFloat()
  8483  	// result: (MOVWstore ptr val mem)
  8484  	for {
  8485  		t := auxToType(v.Aux)
  8486  		ptr := v_0
  8487  		val := v_1
  8488  		mem := v_2
  8489  		if !(t.Size() == 4 && !t.IsFloat()) {
  8490  			break
  8491  		}
  8492  		v.reset(OpRISCV64MOVWstore)
  8493  		v.AddArg3(ptr, val, mem)
  8494  		return true
  8495  	}
  8496  	// match: (Store {t} ptr val mem)
  8497  	// cond: t.Size() == 8 && !t.IsFloat()
  8498  	// result: (MOVDstore ptr val mem)
  8499  	for {
  8500  		t := auxToType(v.Aux)
  8501  		ptr := v_0
  8502  		val := v_1
  8503  		mem := v_2
  8504  		if !(t.Size() == 8 && !t.IsFloat()) {
  8505  			break
  8506  		}
  8507  		v.reset(OpRISCV64MOVDstore)
  8508  		v.AddArg3(ptr, val, mem)
  8509  		return true
  8510  	}
  8511  	// match: (Store {t} ptr val mem)
  8512  	// cond: t.Size() == 4 && t.IsFloat()
  8513  	// result: (FMOVWstore ptr val mem)
  8514  	for {
  8515  		t := auxToType(v.Aux)
  8516  		ptr := v_0
  8517  		val := v_1
  8518  		mem := v_2
  8519  		if !(t.Size() == 4 && t.IsFloat()) {
  8520  			break
  8521  		}
  8522  		v.reset(OpRISCV64FMOVWstore)
  8523  		v.AddArg3(ptr, val, mem)
  8524  		return true
  8525  	}
  8526  	// match: (Store {t} ptr val mem)
  8527  	// cond: t.Size() == 8 && t.IsFloat()
  8528  	// result: (FMOVDstore ptr val mem)
  8529  	for {
  8530  		t := auxToType(v.Aux)
  8531  		ptr := v_0
  8532  		val := v_1
  8533  		mem := v_2
  8534  		if !(t.Size() == 8 && t.IsFloat()) {
  8535  			break
  8536  		}
  8537  		v.reset(OpRISCV64FMOVDstore)
  8538  		v.AddArg3(ptr, val, mem)
  8539  		return true
  8540  	}
  8541  	return false
  8542  }
  8543  func rewriteValueRISCV64_OpZero(v *Value) bool {
  8544  	v_1 := v.Args[1]
  8545  	v_0 := v.Args[0]
  8546  	b := v.Block
  8547  	config := b.Func.Config
  8548  	typ := &b.Func.Config.Types
  8549  	// match: (Zero [0] _ mem)
  8550  	// result: mem
  8551  	for {
  8552  		if auxIntToInt64(v.AuxInt) != 0 {
  8553  			break
  8554  		}
  8555  		mem := v_1
  8556  		v.copyOf(mem)
  8557  		return true
  8558  	}
  8559  	// match: (Zero [1] ptr mem)
  8560  	// result: (MOVBstore ptr (MOVDconst [0]) mem)
  8561  	for {
  8562  		if auxIntToInt64(v.AuxInt) != 1 {
  8563  			break
  8564  		}
  8565  		ptr := v_0
  8566  		mem := v_1
  8567  		v.reset(OpRISCV64MOVBstore)
  8568  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8569  		v0.AuxInt = int64ToAuxInt(0)
  8570  		v.AddArg3(ptr, v0, mem)
  8571  		return true
  8572  	}
  8573  	// match: (Zero [2] {t} ptr mem)
  8574  	// cond: t.Alignment()%2 == 0
  8575  	// result: (MOVHstore ptr (MOVDconst [0]) mem)
  8576  	for {
  8577  		if auxIntToInt64(v.AuxInt) != 2 {
  8578  			break
  8579  		}
  8580  		t := auxToType(v.Aux)
  8581  		ptr := v_0
  8582  		mem := v_1
  8583  		if !(t.Alignment()%2 == 0) {
  8584  			break
  8585  		}
  8586  		v.reset(OpRISCV64MOVHstore)
  8587  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8588  		v0.AuxInt = int64ToAuxInt(0)
  8589  		v.AddArg3(ptr, v0, mem)
  8590  		return true
  8591  	}
  8592  	// match: (Zero [2] ptr mem)
  8593  	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
  8594  	for {
  8595  		if auxIntToInt64(v.AuxInt) != 2 {
  8596  			break
  8597  		}
  8598  		ptr := v_0
  8599  		mem := v_1
  8600  		v.reset(OpRISCV64MOVBstore)
  8601  		v.AuxInt = int32ToAuxInt(1)
  8602  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8603  		v0.AuxInt = int64ToAuxInt(0)
  8604  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8605  		v1.AddArg3(ptr, v0, mem)
  8606  		v.AddArg3(ptr, v0, v1)
  8607  		return true
  8608  	}
  8609  	// match: (Zero [4] {t} ptr mem)
  8610  	// cond: t.Alignment()%4 == 0
  8611  	// result: (MOVWstore ptr (MOVDconst [0]) mem)
  8612  	for {
  8613  		if auxIntToInt64(v.AuxInt) != 4 {
  8614  			break
  8615  		}
  8616  		t := auxToType(v.Aux)
  8617  		ptr := v_0
  8618  		mem := v_1
  8619  		if !(t.Alignment()%4 == 0) {
  8620  			break
  8621  		}
  8622  		v.reset(OpRISCV64MOVWstore)
  8623  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8624  		v0.AuxInt = int64ToAuxInt(0)
  8625  		v.AddArg3(ptr, v0, mem)
  8626  		return true
  8627  	}
  8628  	// match: (Zero [4] {t} ptr mem)
  8629  	// cond: t.Alignment()%2 == 0
  8630  	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
  8631  	for {
  8632  		if auxIntToInt64(v.AuxInt) != 4 {
  8633  			break
  8634  		}
  8635  		t := auxToType(v.Aux)
  8636  		ptr := v_0
  8637  		mem := v_1
  8638  		if !(t.Alignment()%2 == 0) {
  8639  			break
  8640  		}
  8641  		v.reset(OpRISCV64MOVHstore)
  8642  		v.AuxInt = int32ToAuxInt(2)
  8643  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8644  		v0.AuxInt = int64ToAuxInt(0)
  8645  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8646  		v1.AddArg3(ptr, v0, mem)
  8647  		v.AddArg3(ptr, v0, v1)
  8648  		return true
  8649  	}
  8650  	// match: (Zero [4] ptr mem)
  8651  	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
  8652  	for {
  8653  		if auxIntToInt64(v.AuxInt) != 4 {
  8654  			break
  8655  		}
  8656  		ptr := v_0
  8657  		mem := v_1
  8658  		v.reset(OpRISCV64MOVBstore)
  8659  		v.AuxInt = int32ToAuxInt(3)
  8660  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8661  		v0.AuxInt = int64ToAuxInt(0)
  8662  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8663  		v1.AuxInt = int32ToAuxInt(2)
  8664  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8665  		v2.AuxInt = int32ToAuxInt(1)
  8666  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8667  		v3.AddArg3(ptr, v0, mem)
  8668  		v2.AddArg3(ptr, v0, v3)
  8669  		v1.AddArg3(ptr, v0, v2)
  8670  		v.AddArg3(ptr, v0, v1)
  8671  		return true
  8672  	}
  8673  	// match: (Zero [8] {t} ptr mem)
  8674  	// cond: t.Alignment()%8 == 0
  8675  	// result: (MOVDstore ptr (MOVDconst [0]) mem)
  8676  	for {
  8677  		if auxIntToInt64(v.AuxInt) != 8 {
  8678  			break
  8679  		}
  8680  		t := auxToType(v.Aux)
  8681  		ptr := v_0
  8682  		mem := v_1
  8683  		if !(t.Alignment()%8 == 0) {
  8684  			break
  8685  		}
  8686  		v.reset(OpRISCV64MOVDstore)
  8687  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8688  		v0.AuxInt = int64ToAuxInt(0)
  8689  		v.AddArg3(ptr, v0, mem)
  8690  		return true
  8691  	}
  8692  	// match: (Zero [8] {t} ptr mem)
  8693  	// cond: t.Alignment()%4 == 0
  8694  	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
  8695  	for {
  8696  		if auxIntToInt64(v.AuxInt) != 8 {
  8697  			break
  8698  		}
  8699  		t := auxToType(v.Aux)
  8700  		ptr := v_0
  8701  		mem := v_1
  8702  		if !(t.Alignment()%4 == 0) {
  8703  			break
  8704  		}
  8705  		v.reset(OpRISCV64MOVWstore)
  8706  		v.AuxInt = int32ToAuxInt(4)
  8707  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8708  		v0.AuxInt = int64ToAuxInt(0)
  8709  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8710  		v1.AddArg3(ptr, v0, mem)
  8711  		v.AddArg3(ptr, v0, v1)
  8712  		return true
  8713  	}
  8714  	// match: (Zero [8] {t} ptr mem)
  8715  	// cond: t.Alignment()%2 == 0
  8716  	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
  8717  	for {
  8718  		if auxIntToInt64(v.AuxInt) != 8 {
  8719  			break
  8720  		}
  8721  		t := auxToType(v.Aux)
  8722  		ptr := v_0
  8723  		mem := v_1
  8724  		if !(t.Alignment()%2 == 0) {
  8725  			break
  8726  		}
  8727  		v.reset(OpRISCV64MOVHstore)
  8728  		v.AuxInt = int32ToAuxInt(6)
  8729  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8730  		v0.AuxInt = int64ToAuxInt(0)
  8731  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8732  		v1.AuxInt = int32ToAuxInt(4)
  8733  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8734  		v2.AuxInt = int32ToAuxInt(2)
  8735  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8736  		v3.AddArg3(ptr, v0, mem)
  8737  		v2.AddArg3(ptr, v0, v3)
  8738  		v1.AddArg3(ptr, v0, v2)
  8739  		v.AddArg3(ptr, v0, v1)
  8740  		return true
  8741  	}
  8742  	// match: (Zero [3] ptr mem)
  8743  	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
  8744  	for {
  8745  		if auxIntToInt64(v.AuxInt) != 3 {
  8746  			break
  8747  		}
  8748  		ptr := v_0
  8749  		mem := v_1
  8750  		v.reset(OpRISCV64MOVBstore)
  8751  		v.AuxInt = int32ToAuxInt(2)
  8752  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8753  		v0.AuxInt = int64ToAuxInt(0)
  8754  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8755  		v1.AuxInt = int32ToAuxInt(1)
  8756  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8757  		v2.AddArg3(ptr, v0, mem)
  8758  		v1.AddArg3(ptr, v0, v2)
  8759  		v.AddArg3(ptr, v0, v1)
  8760  		return true
  8761  	}
  8762  	// match: (Zero [6] {t} ptr mem)
  8763  	// cond: t.Alignment()%2 == 0
  8764  	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
  8765  	for {
  8766  		if auxIntToInt64(v.AuxInt) != 6 {
  8767  			break
  8768  		}
  8769  		t := auxToType(v.Aux)
  8770  		ptr := v_0
  8771  		mem := v_1
  8772  		if !(t.Alignment()%2 == 0) {
  8773  			break
  8774  		}
  8775  		v.reset(OpRISCV64MOVHstore)
  8776  		v.AuxInt = int32ToAuxInt(4)
  8777  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8778  		v0.AuxInt = int64ToAuxInt(0)
  8779  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8780  		v1.AuxInt = int32ToAuxInt(2)
  8781  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8782  		v2.AddArg3(ptr, v0, mem)
  8783  		v1.AddArg3(ptr, v0, v2)
  8784  		v.AddArg3(ptr, v0, v1)
  8785  		return true
  8786  	}
  8787  	// match: (Zero [12] {t} ptr mem)
  8788  	// cond: t.Alignment()%4 == 0
  8789  	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
  8790  	for {
  8791  		if auxIntToInt64(v.AuxInt) != 12 {
  8792  			break
  8793  		}
  8794  		t := auxToType(v.Aux)
  8795  		ptr := v_0
  8796  		mem := v_1
  8797  		if !(t.Alignment()%4 == 0) {
  8798  			break
  8799  		}
  8800  		v.reset(OpRISCV64MOVWstore)
  8801  		v.AuxInt = int32ToAuxInt(8)
  8802  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8803  		v0.AuxInt = int64ToAuxInt(0)
  8804  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8805  		v1.AuxInt = int32ToAuxInt(4)
  8806  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8807  		v2.AddArg3(ptr, v0, mem)
  8808  		v1.AddArg3(ptr, v0, v2)
  8809  		v.AddArg3(ptr, v0, v1)
  8810  		return true
  8811  	}
  8812  	// match: (Zero [16] {t} ptr mem)
  8813  	// cond: t.Alignment()%8 == 0
  8814  	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
  8815  	for {
  8816  		if auxIntToInt64(v.AuxInt) != 16 {
  8817  			break
  8818  		}
  8819  		t := auxToType(v.Aux)
  8820  		ptr := v_0
  8821  		mem := v_1
  8822  		if !(t.Alignment()%8 == 0) {
  8823  			break
  8824  		}
  8825  		v.reset(OpRISCV64MOVDstore)
  8826  		v.AuxInt = int32ToAuxInt(8)
  8827  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8828  		v0.AuxInt = int64ToAuxInt(0)
  8829  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8830  		v1.AddArg3(ptr, v0, mem)
  8831  		v.AddArg3(ptr, v0, v1)
  8832  		return true
  8833  	}
  8834  	// match: (Zero [24] {t} ptr mem)
  8835  	// cond: t.Alignment()%8 == 0
  8836  	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
  8837  	for {
  8838  		if auxIntToInt64(v.AuxInt) != 24 {
  8839  			break
  8840  		}
  8841  		t := auxToType(v.Aux)
  8842  		ptr := v_0
  8843  		mem := v_1
  8844  		if !(t.Alignment()%8 == 0) {
  8845  			break
  8846  		}
  8847  		v.reset(OpRISCV64MOVDstore)
  8848  		v.AuxInt = int32ToAuxInt(16)
  8849  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8850  		v0.AuxInt = int64ToAuxInt(0)
  8851  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8852  		v1.AuxInt = int32ToAuxInt(8)
  8853  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8854  		v2.AddArg3(ptr, v0, mem)
  8855  		v1.AddArg3(ptr, v0, v2)
  8856  		v.AddArg3(ptr, v0, v1)
  8857  		return true
  8858  	}
  8859  	// match: (Zero [32] {t} ptr mem)
  8860  	// cond: t.Alignment()%8 == 0
  8861  	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
  8862  	for {
  8863  		if auxIntToInt64(v.AuxInt) != 32 {
  8864  			break
  8865  		}
  8866  		t := auxToType(v.Aux)
  8867  		ptr := v_0
  8868  		mem := v_1
  8869  		if !(t.Alignment()%8 == 0) {
  8870  			break
  8871  		}
  8872  		v.reset(OpRISCV64MOVDstore)
  8873  		v.AuxInt = int32ToAuxInt(24)
  8874  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8875  		v0.AuxInt = int64ToAuxInt(0)
  8876  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8877  		v1.AuxInt = int32ToAuxInt(16)
  8878  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8879  		v2.AuxInt = int32ToAuxInt(8)
  8880  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8881  		v3.AddArg3(ptr, v0, mem)
  8882  		v2.AddArg3(ptr, v0, v3)
  8883  		v1.AddArg3(ptr, v0, v2)
  8884  		v.AddArg3(ptr, v0, v1)
  8885  		return true
  8886  	}
  8887  	// match: (Zero [s] {t} ptr mem)
  8888  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
  8889  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
  8890  	for {
  8891  		s := auxIntToInt64(v.AuxInt)
  8892  		t := auxToType(v.Aux)
  8893  		ptr := v_0
  8894  		mem := v_1
  8895  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
  8896  			break
  8897  		}
  8898  		v.reset(OpRISCV64DUFFZERO)
  8899  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
  8900  		v.AddArg2(ptr, mem)
  8901  		return true
  8902  	}
  8903  	// match: (Zero [s] {t} ptr mem)
  8904  	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
  8905  	for {
  8906  		s := auxIntToInt64(v.AuxInt)
  8907  		t := auxToType(v.Aux)
  8908  		ptr := v_0
  8909  		mem := v_1
  8910  		v.reset(OpRISCV64LoweredZero)
  8911  		v.AuxInt = int64ToAuxInt(t.Alignment())
  8912  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
  8913  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8914  		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  8915  		v0.AddArg2(ptr, v1)
  8916  		v.AddArg3(ptr, v0, mem)
  8917  		return true
  8918  	}
  8919  }
  8920  func rewriteBlockRISCV64(b *Block) bool {
  8921  	typ := &b.Func.Config.Types
  8922  	switch b.Kind {
  8923  	case BlockRISCV64BEQ:
  8924  		// match: (BEQ (MOVDconst [0]) cond yes no)
  8925  		// result: (BEQZ cond yes no)
  8926  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  8927  			v_0 := b.Controls[0]
  8928  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8929  				break
  8930  			}
  8931  			cond := b.Controls[1]
  8932  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  8933  			return true
  8934  		}
  8935  		// match: (BEQ cond (MOVDconst [0]) yes no)
  8936  		// result: (BEQZ cond yes no)
  8937  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  8938  			cond := b.Controls[0]
  8939  			v_1 := b.Controls[1]
  8940  			if auxIntToInt64(v_1.AuxInt) != 0 {
  8941  				break
  8942  			}
  8943  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  8944  			return true
  8945  		}
  8946  	case BlockRISCV64BEQZ:
  8947  		// match: (BEQZ (SEQZ x) yes no)
  8948  		// result: (BNEZ x yes no)
  8949  		for b.Controls[0].Op == OpRISCV64SEQZ {
  8950  			v_0 := b.Controls[0]
  8951  			x := v_0.Args[0]
  8952  			b.resetWithControl(BlockRISCV64BNEZ, x)
  8953  			return true
  8954  		}
  8955  		// match: (BEQZ (SNEZ x) yes no)
  8956  		// result: (BEQZ x yes no)
  8957  		for b.Controls[0].Op == OpRISCV64SNEZ {
  8958  			v_0 := b.Controls[0]
  8959  			x := v_0.Args[0]
  8960  			b.resetWithControl(BlockRISCV64BEQZ, x)
  8961  			return true
  8962  		}
  8963  		// match: (BEQZ (NEG x) yes no)
  8964  		// result: (BEQZ x yes no)
  8965  		for b.Controls[0].Op == OpRISCV64NEG {
  8966  			v_0 := b.Controls[0]
  8967  			x := v_0.Args[0]
  8968  			b.resetWithControl(BlockRISCV64BEQZ, x)
  8969  			return true
  8970  		}
  8971  		// match: (BEQZ (FNES <t> x y) yes no)
  8972  		// result: (BNEZ (FEQS <t> x y) yes no)
  8973  		for b.Controls[0].Op == OpRISCV64FNES {
  8974  			v_0 := b.Controls[0]
  8975  			t := v_0.Type
  8976  			_ = v_0.Args[1]
  8977  			v_0_0 := v_0.Args[0]
  8978  			v_0_1 := v_0.Args[1]
  8979  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8980  				x := v_0_0
  8981  				y := v_0_1
  8982  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  8983  				v0.AddArg2(x, y)
  8984  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  8985  				return true
  8986  			}
  8987  		}
  8988  		// match: (BEQZ (FNED <t> x y) yes no)
  8989  		// result: (BNEZ (FEQD <t> x y) yes no)
  8990  		for b.Controls[0].Op == OpRISCV64FNED {
  8991  			v_0 := b.Controls[0]
  8992  			t := v_0.Type
  8993  			_ = v_0.Args[1]
  8994  			v_0_0 := v_0.Args[0]
  8995  			v_0_1 := v_0.Args[1]
  8996  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8997  				x := v_0_0
  8998  				y := v_0_1
  8999  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9000  				v0.AddArg2(x, y)
  9001  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  9002  				return true
  9003  			}
  9004  		}
  9005  		// match: (BEQZ (SUB x y) yes no)
  9006  		// result: (BEQ x y yes no)
  9007  		for b.Controls[0].Op == OpRISCV64SUB {
  9008  			v_0 := b.Controls[0]
  9009  			y := v_0.Args[1]
  9010  			x := v_0.Args[0]
  9011  			b.resetWithControl2(BlockRISCV64BEQ, x, y)
  9012  			return true
  9013  		}
  9014  		// match: (BEQZ (SLT x y) yes no)
  9015  		// result: (BGE x y yes no)
  9016  		for b.Controls[0].Op == OpRISCV64SLT {
  9017  			v_0 := b.Controls[0]
  9018  			y := v_0.Args[1]
  9019  			x := v_0.Args[0]
  9020  			b.resetWithControl2(BlockRISCV64BGE, x, y)
  9021  			return true
  9022  		}
  9023  		// match: (BEQZ (SLTU x y) yes no)
  9024  		// result: (BGEU x y yes no)
  9025  		for b.Controls[0].Op == OpRISCV64SLTU {
  9026  			v_0 := b.Controls[0]
  9027  			y := v_0.Args[1]
  9028  			x := v_0.Args[0]
  9029  			b.resetWithControl2(BlockRISCV64BGEU, x, y)
  9030  			return true
  9031  		}
  9032  		// match: (BEQZ (SLTI [x] y) yes no)
  9033  		// result: (BGE y (MOVDconst [x]) yes no)
  9034  		for b.Controls[0].Op == OpRISCV64SLTI {
  9035  			v_0 := b.Controls[0]
  9036  			x := auxIntToInt64(v_0.AuxInt)
  9037  			y := v_0.Args[0]
  9038  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9039  			v0.AuxInt = int64ToAuxInt(x)
  9040  			b.resetWithControl2(BlockRISCV64BGE, y, v0)
  9041  			return true
  9042  		}
  9043  		// match: (BEQZ (SLTIU [x] y) yes no)
  9044  		// result: (BGEU y (MOVDconst [x]) yes no)
  9045  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9046  			v_0 := b.Controls[0]
  9047  			x := auxIntToInt64(v_0.AuxInt)
  9048  			y := v_0.Args[0]
  9049  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9050  			v0.AuxInt = int64ToAuxInt(x)
  9051  			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
  9052  			return true
  9053  		}
  9054  	case BlockRISCV64BGE:
  9055  		// match: (BGE (MOVDconst [0]) cond yes no)
  9056  		// result: (BLEZ cond yes no)
  9057  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9058  			v_0 := b.Controls[0]
  9059  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9060  				break
  9061  			}
  9062  			cond := b.Controls[1]
  9063  			b.resetWithControl(BlockRISCV64BLEZ, cond)
  9064  			return true
  9065  		}
  9066  		// match: (BGE cond (MOVDconst [0]) yes no)
  9067  		// result: (BGEZ cond yes no)
  9068  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9069  			cond := b.Controls[0]
  9070  			v_1 := b.Controls[1]
  9071  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9072  				break
  9073  			}
  9074  			b.resetWithControl(BlockRISCV64BGEZ, cond)
  9075  			return true
  9076  		}
  9077  	case BlockRISCV64BLT:
  9078  		// match: (BLT (MOVDconst [0]) cond yes no)
  9079  		// result: (BGTZ cond yes no)
  9080  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9081  			v_0 := b.Controls[0]
  9082  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9083  				break
  9084  			}
  9085  			cond := b.Controls[1]
  9086  			b.resetWithControl(BlockRISCV64BGTZ, cond)
  9087  			return true
  9088  		}
  9089  		// match: (BLT cond (MOVDconst [0]) yes no)
  9090  		// result: (BLTZ cond yes no)
  9091  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9092  			cond := b.Controls[0]
  9093  			v_1 := b.Controls[1]
  9094  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9095  				break
  9096  			}
  9097  			b.resetWithControl(BlockRISCV64BLTZ, cond)
  9098  			return true
  9099  		}
  9100  	case BlockRISCV64BNE:
  9101  		// match: (BNE (MOVDconst [0]) cond yes no)
  9102  		// result: (BNEZ cond yes no)
  9103  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9104  			v_0 := b.Controls[0]
  9105  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9106  				break
  9107  			}
  9108  			cond := b.Controls[1]
  9109  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9110  			return true
  9111  		}
  9112  		// match: (BNE cond (MOVDconst [0]) yes no)
  9113  		// result: (BNEZ cond yes no)
  9114  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9115  			cond := b.Controls[0]
  9116  			v_1 := b.Controls[1]
  9117  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9118  				break
  9119  			}
  9120  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9121  			return true
  9122  		}
  9123  	case BlockRISCV64BNEZ:
  9124  		// match: (BNEZ (SEQZ x) yes no)
  9125  		// result: (BEQZ x yes no)
  9126  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9127  			v_0 := b.Controls[0]
  9128  			x := v_0.Args[0]
  9129  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9130  			return true
  9131  		}
  9132  		// match: (BNEZ (SNEZ x) yes no)
  9133  		// result: (BNEZ x yes no)
  9134  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9135  			v_0 := b.Controls[0]
  9136  			x := v_0.Args[0]
  9137  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9138  			return true
  9139  		}
  9140  		// match: (BNEZ (NEG x) yes no)
  9141  		// result: (BNEZ x yes no)
  9142  		for b.Controls[0].Op == OpRISCV64NEG {
  9143  			v_0 := b.Controls[0]
  9144  			x := v_0.Args[0]
  9145  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9146  			return true
  9147  		}
  9148  		// match: (BNEZ (FNES <t> x y) yes no)
  9149  		// result: (BEQZ (FEQS <t> x y) yes no)
  9150  		for b.Controls[0].Op == OpRISCV64FNES {
  9151  			v_0 := b.Controls[0]
  9152  			t := v_0.Type
  9153  			_ = v_0.Args[1]
  9154  			v_0_0 := v_0.Args[0]
  9155  			v_0_1 := v_0.Args[1]
  9156  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9157  				x := v_0_0
  9158  				y := v_0_1
  9159  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  9160  				v0.AddArg2(x, y)
  9161  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9162  				return true
  9163  			}
  9164  		}
  9165  		// match: (BNEZ (FNED <t> x y) yes no)
  9166  		// result: (BEQZ (FEQD <t> x y) yes no)
  9167  		for b.Controls[0].Op == OpRISCV64FNED {
  9168  			v_0 := b.Controls[0]
  9169  			t := v_0.Type
  9170  			_ = v_0.Args[1]
  9171  			v_0_0 := v_0.Args[0]
  9172  			v_0_1 := v_0.Args[1]
  9173  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9174  				x := v_0_0
  9175  				y := v_0_1
  9176  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9177  				v0.AddArg2(x, y)
  9178  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9179  				return true
  9180  			}
  9181  		}
  9182  		// match: (BNEZ (SUB x y) yes no)
  9183  		// result: (BNE x y yes no)
  9184  		for b.Controls[0].Op == OpRISCV64SUB {
  9185  			v_0 := b.Controls[0]
  9186  			y := v_0.Args[1]
  9187  			x := v_0.Args[0]
  9188  			b.resetWithControl2(BlockRISCV64BNE, x, y)
  9189  			return true
  9190  		}
  9191  		// match: (BNEZ (SLT x y) yes no)
  9192  		// result: (BLT x y yes no)
  9193  		for b.Controls[0].Op == OpRISCV64SLT {
  9194  			v_0 := b.Controls[0]
  9195  			y := v_0.Args[1]
  9196  			x := v_0.Args[0]
  9197  			b.resetWithControl2(BlockRISCV64BLT, x, y)
  9198  			return true
  9199  		}
  9200  		// match: (BNEZ (SLTU x y) yes no)
  9201  		// result: (BLTU x y yes no)
  9202  		for b.Controls[0].Op == OpRISCV64SLTU {
  9203  			v_0 := b.Controls[0]
  9204  			y := v_0.Args[1]
  9205  			x := v_0.Args[0]
  9206  			b.resetWithControl2(BlockRISCV64BLTU, x, y)
  9207  			return true
  9208  		}
  9209  		// match: (BNEZ (SLTI [x] y) yes no)
  9210  		// result: (BLT y (MOVDconst [x]) yes no)
  9211  		for b.Controls[0].Op == OpRISCV64SLTI {
  9212  			v_0 := b.Controls[0]
  9213  			x := auxIntToInt64(v_0.AuxInt)
  9214  			y := v_0.Args[0]
  9215  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9216  			v0.AuxInt = int64ToAuxInt(x)
  9217  			b.resetWithControl2(BlockRISCV64BLT, y, v0)
  9218  			return true
  9219  		}
  9220  		// match: (BNEZ (SLTIU [x] y) yes no)
  9221  		// result: (BLTU y (MOVDconst [x]) yes no)
  9222  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9223  			v_0 := b.Controls[0]
  9224  			x := auxIntToInt64(v_0.AuxInt)
  9225  			y := v_0.Args[0]
  9226  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9227  			v0.AuxInt = int64ToAuxInt(x)
  9228  			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
  9229  			return true
  9230  		}
  9231  	case BlockIf:
  9232  		// match: (If cond yes no)
  9233  		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
  9234  		for {
  9235  			cond := b.Controls[0]
  9236  			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
  9237  			v0.AddArg(cond)
  9238  			b.resetWithControl(BlockRISCV64BNEZ, v0)
  9239  			return true
  9240  		}
  9241  	}
  9242  	return false
  9243  }
  9244  
