Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "math/bits"
     8  import "cmd/compile/internal/types"
     9  
        // NOTE(review): this file is machine-generated from _gen/RISCV64.rules
        // (see the header); do not hand-edit — change the rules file and re-run
        // 'go generate' instead.
        //
        // rewriteValueRISCV64 dispatches on v.Op. Trivial lowerings rewrite
        // v.Op in place to the corresponding RISCV64 machine op and report true;
        // rules that need argument rewriting delegate to per-op helper functions;
        // ops with no matching case report false (no rewrite applied).
    10  func rewriteValueRISCV64(v *Value) bool {
    11  	switch v.Op {
    12  	case OpAbs:
    13  		v.Op = OpRISCV64FABSD
    14  		return true
    15  	case OpAdd16:
    16  		v.Op = OpRISCV64ADD
    17  		return true
    18  	case OpAdd32:
    19  		v.Op = OpRISCV64ADD
    20  		return true
    21  	case OpAdd32F:
    22  		v.Op = OpRISCV64FADDS
    23  		return true
    24  	case OpAdd64:
    25  		v.Op = OpRISCV64ADD
    26  		return true
    27  	case OpAdd64F:
    28  		v.Op = OpRISCV64FADDD
    29  		return true
    30  	case OpAdd8:
    31  		v.Op = OpRISCV64ADD
    32  		return true
    33  	case OpAddPtr:
    34  		v.Op = OpRISCV64ADD
    35  		return true
    36  	case OpAddr:
    37  		return rewriteValueRISCV64_OpAddr(v)
    38  	case OpAnd16:
    39  		v.Op = OpRISCV64AND
    40  		return true
    41  	case OpAnd32:
    42  		v.Op = OpRISCV64AND
    43  		return true
    44  	case OpAnd64:
    45  		v.Op = OpRISCV64AND
    46  		return true
    47  	case OpAnd8:
    48  		v.Op = OpRISCV64AND
    49  		return true
    50  	case OpAndB:
    51  		v.Op = OpRISCV64AND
    52  		return true
    53  	case OpAtomicAdd32:
    54  		v.Op = OpRISCV64LoweredAtomicAdd32
    55  		return true
    56  	case OpAtomicAdd64:
    57  		v.Op = OpRISCV64LoweredAtomicAdd64
    58  		return true
    59  	case OpAtomicAnd32:
    60  		v.Op = OpRISCV64LoweredAtomicAnd32
    61  		return true
    62  	case OpAtomicAnd8:
    63  		return rewriteValueRISCV64_OpAtomicAnd8(v)
    64  	case OpAtomicCompareAndSwap32:
    65  		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
    66  	case OpAtomicCompareAndSwap64:
    67  		v.Op = OpRISCV64LoweredAtomicCas64
    68  		return true
    69  	case OpAtomicExchange32:
    70  		v.Op = OpRISCV64LoweredAtomicExchange32
    71  		return true
    72  	case OpAtomicExchange64:
    73  		v.Op = OpRISCV64LoweredAtomicExchange64
    74  		return true
    75  	case OpAtomicLoad32:
    76  		v.Op = OpRISCV64LoweredAtomicLoad32
    77  		return true
    78  	case OpAtomicLoad64:
    79  		v.Op = OpRISCV64LoweredAtomicLoad64
    80  		return true
    81  	case OpAtomicLoad8:
    82  		v.Op = OpRISCV64LoweredAtomicLoad8
    83  		return true
    84  	case OpAtomicLoadPtr:
    85  		v.Op = OpRISCV64LoweredAtomicLoad64
    86  		return true
    87  	case OpAtomicOr32:
    88  		v.Op = OpRISCV64LoweredAtomicOr32
    89  		return true
    90  	case OpAtomicOr8:
    91  		return rewriteValueRISCV64_OpAtomicOr8(v)
    92  	case OpAtomicStore32:
    93  		v.Op = OpRISCV64LoweredAtomicStore32
    94  		return true
    95  	case OpAtomicStore64:
    96  		v.Op = OpRISCV64LoweredAtomicStore64
    97  		return true
    98  	case OpAtomicStore8:
    99  		v.Op = OpRISCV64LoweredAtomicStore8
   100  		return true
   101  	case OpAtomicStorePtrNoWB:
   102  		v.Op = OpRISCV64LoweredAtomicStore64
   103  		return true
   104  	case OpAvg64u:
   105  		return rewriteValueRISCV64_OpAvg64u(v)
   106  	case OpBitLen16:
   107  		return rewriteValueRISCV64_OpBitLen16(v)
   108  	case OpBitLen32:
   109  		return rewriteValueRISCV64_OpBitLen32(v)
   110  	case OpBitLen64:
   111  		return rewriteValueRISCV64_OpBitLen64(v)
   112  	case OpBitLen8:
   113  		return rewriteValueRISCV64_OpBitLen8(v)
   114  	case OpBswap16:
   115  		return rewriteValueRISCV64_OpBswap16(v)
   116  	case OpBswap32:
   117  		return rewriteValueRISCV64_OpBswap32(v)
   118  	case OpBswap64:
   119  		v.Op = OpRISCV64REV8
   120  		return true
   121  	case OpClosureCall:
   122  		v.Op = OpRISCV64CALLclosure
   123  		return true
   124  	case OpCom16:
   125  		v.Op = OpRISCV64NOT
   126  		return true
   127  	case OpCom32:
   128  		v.Op = OpRISCV64NOT
   129  		return true
   130  	case OpCom64:
   131  		v.Op = OpRISCV64NOT
   132  		return true
   133  	case OpCom8:
   134  		v.Op = OpRISCV64NOT
   135  		return true
   136  	case OpConst16:
   137  		return rewriteValueRISCV64_OpConst16(v)
   138  	case OpConst32:
   139  		return rewriteValueRISCV64_OpConst32(v)
   140  	case OpConst32F:
   141  		v.Op = OpRISCV64FMOVFconst
   142  		return true
   143  	case OpConst64:
   144  		return rewriteValueRISCV64_OpConst64(v)
   145  	case OpConst64F:
   146  		v.Op = OpRISCV64FMOVDconst
   147  		return true
   148  	case OpConst8:
   149  		return rewriteValueRISCV64_OpConst8(v)
   150  	case OpConstBool:
   151  		return rewriteValueRISCV64_OpConstBool(v)
   152  	case OpConstNil:
   153  		return rewriteValueRISCV64_OpConstNil(v)
   154  	case OpCopysign:
   155  		v.Op = OpRISCV64FSGNJD
   156  		return true
   157  	case OpCtz16:
   158  		return rewriteValueRISCV64_OpCtz16(v)
   159  	case OpCtz16NonZero:
   160  		v.Op = OpCtz64
   161  		return true
   162  	case OpCtz32:
   163  		v.Op = OpRISCV64CTZW
   164  		return true
   165  	case OpCtz32NonZero:
   166  		v.Op = OpCtz64
   167  		return true
   168  	case OpCtz64:
   169  		v.Op = OpRISCV64CTZ
   170  		return true
   171  	case OpCtz64NonZero:
   172  		v.Op = OpCtz64
   173  		return true
   174  	case OpCtz8:
   175  		return rewriteValueRISCV64_OpCtz8(v)
   176  	case OpCtz8NonZero:
   177  		v.Op = OpCtz64
   178  		return true
   179  	case OpCvt32Fto32:
   180  		v.Op = OpRISCV64FCVTWS
   181  		return true
   182  	case OpCvt32Fto64:
   183  		v.Op = OpRISCV64FCVTLS
   184  		return true
   185  	case OpCvt32Fto64F:
   186  		v.Op = OpRISCV64FCVTDS
   187  		return true
   188  	case OpCvt32to32F:
   189  		v.Op = OpRISCV64FCVTSW
   190  		return true
   191  	case OpCvt32to64F:
   192  		v.Op = OpRISCV64FCVTDW
   193  		return true
   194  	case OpCvt64Fto32:
   195  		v.Op = OpRISCV64FCVTWD
   196  		return true
   197  	case OpCvt64Fto32F:
   198  		v.Op = OpRISCV64FCVTSD
   199  		return true
   200  	case OpCvt64Fto64:
   201  		v.Op = OpRISCV64FCVTLD
   202  		return true
   203  	case OpCvt64to32F:
   204  		v.Op = OpRISCV64FCVTSL
   205  		return true
   206  	case OpCvt64to64F:
   207  		v.Op = OpRISCV64FCVTDL
   208  		return true
   209  	case OpCvtBoolToUint8:
   210  		v.Op = OpCopy
   211  		return true
   212  	case OpDiv16:
   213  		return rewriteValueRISCV64_OpDiv16(v)
   214  	case OpDiv16u:
   215  		return rewriteValueRISCV64_OpDiv16u(v)
   216  	case OpDiv32:
   217  		return rewriteValueRISCV64_OpDiv32(v)
   218  	case OpDiv32F:
   219  		v.Op = OpRISCV64FDIVS
   220  		return true
   221  	case OpDiv32u:
   222  		v.Op = OpRISCV64DIVUW
   223  		return true
   224  	case OpDiv64:
   225  		return rewriteValueRISCV64_OpDiv64(v)
   226  	case OpDiv64F:
   227  		v.Op = OpRISCV64FDIVD
   228  		return true
   229  	case OpDiv64u:
   230  		v.Op = OpRISCV64DIVU
   231  		return true
   232  	case OpDiv8:
   233  		return rewriteValueRISCV64_OpDiv8(v)
   234  	case OpDiv8u:
   235  		return rewriteValueRISCV64_OpDiv8u(v)
   236  	case OpEq16:
   237  		return rewriteValueRISCV64_OpEq16(v)
   238  	case OpEq32:
   239  		return rewriteValueRISCV64_OpEq32(v)
   240  	case OpEq32F:
   241  		v.Op = OpRISCV64FEQS
   242  		return true
   243  	case OpEq64:
   244  		return rewriteValueRISCV64_OpEq64(v)
   245  	case OpEq64F:
   246  		v.Op = OpRISCV64FEQD
   247  		return true
   248  	case OpEq8:
   249  		return rewriteValueRISCV64_OpEq8(v)
   250  	case OpEqB:
   251  		return rewriteValueRISCV64_OpEqB(v)
   252  	case OpEqPtr:
   253  		return rewriteValueRISCV64_OpEqPtr(v)
   254  	case OpFMA:
   255  		v.Op = OpRISCV64FMADDD
   256  		return true
   257  	case OpGetCallerPC:
   258  		v.Op = OpRISCV64LoweredGetCallerPC
   259  		return true
   260  	case OpGetCallerSP:
   261  		v.Op = OpRISCV64LoweredGetCallerSP
   262  		return true
   263  	case OpGetClosurePtr:
   264  		v.Op = OpRISCV64LoweredGetClosurePtr
   265  		return true
   266  	case OpHmul32:
   267  		return rewriteValueRISCV64_OpHmul32(v)
   268  	case OpHmul32u:
   269  		return rewriteValueRISCV64_OpHmul32u(v)
   270  	case OpHmul64:
   271  		v.Op = OpRISCV64MULH
   272  		return true
   273  	case OpHmul64u:
   274  		v.Op = OpRISCV64MULHU
   275  		return true
   276  	case OpInterCall:
   277  		v.Op = OpRISCV64CALLinter
   278  		return true
   279  	case OpIsInBounds:
   280  		v.Op = OpLess64U
   281  		return true
   282  	case OpIsNonNil:
   283  		v.Op = OpRISCV64SNEZ
   284  		return true
   285  	case OpIsSliceInBounds:
   286  		v.Op = OpLeq64U
   287  		return true
   288  	case OpLeq16:
   289  		return rewriteValueRISCV64_OpLeq16(v)
   290  	case OpLeq16U:
   291  		return rewriteValueRISCV64_OpLeq16U(v)
   292  	case OpLeq32:
   293  		return rewriteValueRISCV64_OpLeq32(v)
   294  	case OpLeq32F:
   295  		v.Op = OpRISCV64FLES
   296  		return true
   297  	case OpLeq32U:
   298  		return rewriteValueRISCV64_OpLeq32U(v)
   299  	case OpLeq64:
   300  		return rewriteValueRISCV64_OpLeq64(v)
   301  	case OpLeq64F:
   302  		v.Op = OpRISCV64FLED
   303  		return true
   304  	case OpLeq64U:
   305  		return rewriteValueRISCV64_OpLeq64U(v)
   306  	case OpLeq8:
   307  		return rewriteValueRISCV64_OpLeq8(v)
   308  	case OpLeq8U:
   309  		return rewriteValueRISCV64_OpLeq8U(v)
   310  	case OpLess16:
   311  		return rewriteValueRISCV64_OpLess16(v)
   312  	case OpLess16U:
   313  		return rewriteValueRISCV64_OpLess16U(v)
   314  	case OpLess32:
   315  		return rewriteValueRISCV64_OpLess32(v)
   316  	case OpLess32F:
   317  		v.Op = OpRISCV64FLTS
   318  		return true
   319  	case OpLess32U:
   320  		return rewriteValueRISCV64_OpLess32U(v)
   321  	case OpLess64:
   322  		v.Op = OpRISCV64SLT
   323  		return true
   324  	case OpLess64F:
   325  		v.Op = OpRISCV64FLTD
   326  		return true
   327  	case OpLess64U:
   328  		v.Op = OpRISCV64SLTU
   329  		return true
   330  	case OpLess8:
   331  		return rewriteValueRISCV64_OpLess8(v)
   332  	case OpLess8U:
   333  		return rewriteValueRISCV64_OpLess8U(v)
   334  	case OpLoad:
   335  		return rewriteValueRISCV64_OpLoad(v)
   336  	case OpLocalAddr:
   337  		return rewriteValueRISCV64_OpLocalAddr(v)
   338  	case OpLsh16x16:
   339  		return rewriteValueRISCV64_OpLsh16x16(v)
   340  	case OpLsh16x32:
   341  		return rewriteValueRISCV64_OpLsh16x32(v)
   342  	case OpLsh16x64:
   343  		return rewriteValueRISCV64_OpLsh16x64(v)
   344  	case OpLsh16x8:
   345  		return rewriteValueRISCV64_OpLsh16x8(v)
   346  	case OpLsh32x16:
   347  		return rewriteValueRISCV64_OpLsh32x16(v)
   348  	case OpLsh32x32:
   349  		return rewriteValueRISCV64_OpLsh32x32(v)
   350  	case OpLsh32x64:
   351  		return rewriteValueRISCV64_OpLsh32x64(v)
   352  	case OpLsh32x8:
   353  		return rewriteValueRISCV64_OpLsh32x8(v)
   354  	case OpLsh64x16:
   355  		return rewriteValueRISCV64_OpLsh64x16(v)
   356  	case OpLsh64x32:
   357  		return rewriteValueRISCV64_OpLsh64x32(v)
   358  	case OpLsh64x64:
   359  		return rewriteValueRISCV64_OpLsh64x64(v)
   360  	case OpLsh64x8:
   361  		return rewriteValueRISCV64_OpLsh64x8(v)
   362  	case OpLsh8x16:
   363  		return rewriteValueRISCV64_OpLsh8x16(v)
   364  	case OpLsh8x32:
   365  		return rewriteValueRISCV64_OpLsh8x32(v)
   366  	case OpLsh8x64:
   367  		return rewriteValueRISCV64_OpLsh8x64(v)
   368  	case OpLsh8x8:
   369  		return rewriteValueRISCV64_OpLsh8x8(v)
   370  	case OpMax32F:
   371  		v.Op = OpRISCV64LoweredFMAXS
   372  		return true
   373  	case OpMax64:
   374  		return rewriteValueRISCV64_OpMax64(v)
   375  	case OpMax64F:
   376  		v.Op = OpRISCV64LoweredFMAXD
   377  		return true
   378  	case OpMax64u:
   379  		return rewriteValueRISCV64_OpMax64u(v)
   380  	case OpMin32F:
   381  		v.Op = OpRISCV64LoweredFMINS
   382  		return true
   383  	case OpMin64:
   384  		return rewriteValueRISCV64_OpMin64(v)
   385  	case OpMin64F:
   386  		v.Op = OpRISCV64LoweredFMIND
   387  		return true
   388  	case OpMin64u:
   389  		return rewriteValueRISCV64_OpMin64u(v)
   390  	case OpMod16:
   391  		return rewriteValueRISCV64_OpMod16(v)
   392  	case OpMod16u:
   393  		return rewriteValueRISCV64_OpMod16u(v)
   394  	case OpMod32:
   395  		return rewriteValueRISCV64_OpMod32(v)
   396  	case OpMod32u:
   397  		v.Op = OpRISCV64REMUW
   398  		return true
   399  	case OpMod64:
   400  		return rewriteValueRISCV64_OpMod64(v)
   401  	case OpMod64u:
   402  		v.Op = OpRISCV64REMU
   403  		return true
   404  	case OpMod8:
   405  		return rewriteValueRISCV64_OpMod8(v)
   406  	case OpMod8u:
   407  		return rewriteValueRISCV64_OpMod8u(v)
   408  	case OpMove:
   409  		return rewriteValueRISCV64_OpMove(v)
   410  	case OpMul16:
   411  		v.Op = OpRISCV64MULW
   412  		return true
   413  	case OpMul32:
   414  		v.Op = OpRISCV64MULW
   415  		return true
   416  	case OpMul32F:
   417  		v.Op = OpRISCV64FMULS
   418  		return true
   419  	case OpMul64:
   420  		v.Op = OpRISCV64MUL
   421  		return true
   422  	case OpMul64F:
   423  		v.Op = OpRISCV64FMULD
   424  		return true
   425  	case OpMul64uhilo:
   426  		v.Op = OpRISCV64LoweredMuluhilo
   427  		return true
   428  	case OpMul64uover:
   429  		v.Op = OpRISCV64LoweredMuluover
   430  		return true
   431  	case OpMul8:
   432  		v.Op = OpRISCV64MULW
   433  		return true
   434  	case OpNeg16:
   435  		v.Op = OpRISCV64NEG
   436  		return true
   437  	case OpNeg32:
   438  		v.Op = OpRISCV64NEG
   439  		return true
   440  	case OpNeg32F:
   441  		v.Op = OpRISCV64FNEGS
   442  		return true
   443  	case OpNeg64:
   444  		v.Op = OpRISCV64NEG
   445  		return true
   446  	case OpNeg64F:
   447  		v.Op = OpRISCV64FNEGD
   448  		return true
   449  	case OpNeg8:
   450  		v.Op = OpRISCV64NEG
   451  		return true
   452  	case OpNeq16:
   453  		return rewriteValueRISCV64_OpNeq16(v)
   454  	case OpNeq32:
   455  		return rewriteValueRISCV64_OpNeq32(v)
   456  	case OpNeq32F:
   457  		v.Op = OpRISCV64FNES
   458  		return true
   459  	case OpNeq64:
   460  		return rewriteValueRISCV64_OpNeq64(v)
   461  	case OpNeq64F:
   462  		v.Op = OpRISCV64FNED
   463  		return true
   464  	case OpNeq8:
   465  		return rewriteValueRISCV64_OpNeq8(v)
   466  	case OpNeqB:
   467  		return rewriteValueRISCV64_OpNeqB(v)
   468  	case OpNeqPtr:
   469  		return rewriteValueRISCV64_OpNeqPtr(v)
   470  	case OpNilCheck:
   471  		v.Op = OpRISCV64LoweredNilCheck
   472  		return true
   473  	case OpNot:
   474  		v.Op = OpRISCV64SEQZ
   475  		return true
   476  	case OpOffPtr:
   477  		return rewriteValueRISCV64_OpOffPtr(v)
   478  	case OpOr16:
   479  		v.Op = OpRISCV64OR
   480  		return true
   481  	case OpOr32:
   482  		v.Op = OpRISCV64OR
   483  		return true
   484  	case OpOr64:
   485  		v.Op = OpRISCV64OR
   486  		return true
   487  	case OpOr8:
   488  		v.Op = OpRISCV64OR
   489  		return true
   490  	case OpOrB:
   491  		v.Op = OpRISCV64OR
   492  		return true
   493  	case OpPanicBounds:
   494  		v.Op = OpRISCV64LoweredPanicBoundsRR
   495  		return true
   496  	case OpPopCount16:
   497  		return rewriteValueRISCV64_OpPopCount16(v)
   498  	case OpPopCount32:
   499  		v.Op = OpRISCV64CPOPW
   500  		return true
   501  	case OpPopCount64:
   502  		v.Op = OpRISCV64CPOP
   503  		return true
   504  	case OpPopCount8:
   505  		return rewriteValueRISCV64_OpPopCount8(v)
   506  	case OpPubBarrier:
   507  		v.Op = OpRISCV64LoweredPubBarrier
   508  		return true
   509  	case OpRISCV64ADD:
   510  		return rewriteValueRISCV64_OpRISCV64ADD(v)
   511  	case OpRISCV64ADDI:
   512  		return rewriteValueRISCV64_OpRISCV64ADDI(v)
   513  	case OpRISCV64AND:
   514  		return rewriteValueRISCV64_OpRISCV64AND(v)
   515  	case OpRISCV64ANDI:
   516  		return rewriteValueRISCV64_OpRISCV64ANDI(v)
   517  	case OpRISCV64FADDD:
   518  		return rewriteValueRISCV64_OpRISCV64FADDD(v)
   519  	case OpRISCV64FADDS:
   520  		return rewriteValueRISCV64_OpRISCV64FADDS(v)
   521  	case OpRISCV64FEQD:
   522  		return rewriteValueRISCV64_OpRISCV64FEQD(v)
   523  	case OpRISCV64FLED:
   524  		return rewriteValueRISCV64_OpRISCV64FLED(v)
   525  	case OpRISCV64FLTD:
   526  		return rewriteValueRISCV64_OpRISCV64FLTD(v)
   527  	case OpRISCV64FMADDD:
   528  		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
   529  	case OpRISCV64FMADDS:
   530  		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
   531  	case OpRISCV64FMOVDload:
   532  		return rewriteValueRISCV64_OpRISCV64FMOVDload(v)
   533  	case OpRISCV64FMOVDstore:
   534  		return rewriteValueRISCV64_OpRISCV64FMOVDstore(v)
   535  	case OpRISCV64FMOVWload:
   536  		return rewriteValueRISCV64_OpRISCV64FMOVWload(v)
   537  	case OpRISCV64FMOVWstore:
   538  		return rewriteValueRISCV64_OpRISCV64FMOVWstore(v)
   539  	case OpRISCV64FMSUBD:
   540  		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
   541  	case OpRISCV64FMSUBS:
   542  		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
   543  	case OpRISCV64FNED:
   544  		return rewriteValueRISCV64_OpRISCV64FNED(v)
   545  	case OpRISCV64FNMADDD:
   546  		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
   547  	case OpRISCV64FNMADDS:
   548  		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
   549  	case OpRISCV64FNMSUBD:
   550  		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
   551  	case OpRISCV64FNMSUBS:
   552  		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
   553  	case OpRISCV64FSUBD:
   554  		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
   555  	case OpRISCV64FSUBS:
   556  		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
   557  	case OpRISCV64LoweredPanicBoundsCR:
   558  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v)
   559  	case OpRISCV64LoweredPanicBoundsRC:
   560  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v)
   561  	case OpRISCV64LoweredPanicBoundsRR:
   562  		return rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v)
   563  	case OpRISCV64MOVBUload:
   564  		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
   565  	case OpRISCV64MOVBUreg:
   566  		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
   567  	case OpRISCV64MOVBload:
   568  		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
   569  	case OpRISCV64MOVBreg:
   570  		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
   571  	case OpRISCV64MOVBstore:
   572  		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
   573  	case OpRISCV64MOVBstorezero:
   574  		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
   575  	case OpRISCV64MOVDload:
   576  		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
   577  	case OpRISCV64MOVDnop:
   578  		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
   579  	case OpRISCV64MOVDreg:
   580  		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
   581  	case OpRISCV64MOVDstore:
   582  		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
   583  	case OpRISCV64MOVDstorezero:
   584  		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
   585  	case OpRISCV64MOVHUload:
   586  		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
   587  	case OpRISCV64MOVHUreg:
   588  		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
   589  	case OpRISCV64MOVHload:
   590  		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
   591  	case OpRISCV64MOVHreg:
   592  		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
   593  	case OpRISCV64MOVHstore:
   594  		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
   595  	case OpRISCV64MOVHstorezero:
   596  		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
   597  	case OpRISCV64MOVWUload:
   598  		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
   599  	case OpRISCV64MOVWUreg:
   600  		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
   601  	case OpRISCV64MOVWload:
   602  		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
   603  	case OpRISCV64MOVWreg:
   604  		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
   605  	case OpRISCV64MOVWstore:
   606  		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
   607  	case OpRISCV64MOVWstorezero:
   608  		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
   609  	case OpRISCV64NEG:
   610  		return rewriteValueRISCV64_OpRISCV64NEG(v)
   611  	case OpRISCV64NEGW:
   612  		return rewriteValueRISCV64_OpRISCV64NEGW(v)
   613  	case OpRISCV64OR:
   614  		return rewriteValueRISCV64_OpRISCV64OR(v)
   615  	case OpRISCV64ORI:
   616  		return rewriteValueRISCV64_OpRISCV64ORI(v)
   617  	case OpRISCV64ORN:
   618  		return rewriteValueRISCV64_OpRISCV64ORN(v)
   619  	case OpRISCV64ROL:
   620  		return rewriteValueRISCV64_OpRISCV64ROL(v)
   621  	case OpRISCV64ROLW:
   622  		return rewriteValueRISCV64_OpRISCV64ROLW(v)
   623  	case OpRISCV64ROR:
   624  		return rewriteValueRISCV64_OpRISCV64ROR(v)
   625  	case OpRISCV64RORW:
   626  		return rewriteValueRISCV64_OpRISCV64RORW(v)
   627  	case OpRISCV64SEQZ:
   628  		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
   629  	case OpRISCV64SLL:
   630  		return rewriteValueRISCV64_OpRISCV64SLL(v)
   631  	case OpRISCV64SLLI:
   632  		return rewriteValueRISCV64_OpRISCV64SLLI(v)
   633  	case OpRISCV64SLLW:
   634  		return rewriteValueRISCV64_OpRISCV64SLLW(v)
   635  	case OpRISCV64SLT:
   636  		return rewriteValueRISCV64_OpRISCV64SLT(v)
   637  	case OpRISCV64SLTI:
   638  		return rewriteValueRISCV64_OpRISCV64SLTI(v)
   639  	case OpRISCV64SLTIU:
   640  		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
   641  	case OpRISCV64SLTU:
   642  		return rewriteValueRISCV64_OpRISCV64SLTU(v)
   643  	case OpRISCV64SNEZ:
   644  		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
   645  	case OpRISCV64SRA:
   646  		return rewriteValueRISCV64_OpRISCV64SRA(v)
   647  	case OpRISCV64SRAI:
   648  		return rewriteValueRISCV64_OpRISCV64SRAI(v)
   649  	case OpRISCV64SRAW:
   650  		return rewriteValueRISCV64_OpRISCV64SRAW(v)
   651  	case OpRISCV64SRL:
   652  		return rewriteValueRISCV64_OpRISCV64SRL(v)
   653  	case OpRISCV64SRLI:
   654  		return rewriteValueRISCV64_OpRISCV64SRLI(v)
   655  	case OpRISCV64SRLW:
   656  		return rewriteValueRISCV64_OpRISCV64SRLW(v)
   657  	case OpRISCV64SUB:
   658  		return rewriteValueRISCV64_OpRISCV64SUB(v)
   659  	case OpRISCV64SUBW:
   660  		return rewriteValueRISCV64_OpRISCV64SUBW(v)
   661  	case OpRISCV64XOR:
   662  		return rewriteValueRISCV64_OpRISCV64XOR(v)
   663  	case OpRotateLeft16:
   664  		return rewriteValueRISCV64_OpRotateLeft16(v)
   665  	case OpRotateLeft32:
   666  		v.Op = OpRISCV64ROLW
   667  		return true
   668  	case OpRotateLeft64:
   669  		v.Op = OpRISCV64ROL
   670  		return true
   671  	case OpRotateLeft8:
   672  		return rewriteValueRISCV64_OpRotateLeft8(v)
   673  	case OpRound32F:
   674  		v.Op = OpRISCV64LoweredRound32F
   675  		return true
   676  	case OpRound64F:
   677  		v.Op = OpRISCV64LoweredRound64F
   678  		return true
   679  	case OpRsh16Ux16:
   680  		return rewriteValueRISCV64_OpRsh16Ux16(v)
   681  	case OpRsh16Ux32:
   682  		return rewriteValueRISCV64_OpRsh16Ux32(v)
   683  	case OpRsh16Ux64:
   684  		return rewriteValueRISCV64_OpRsh16Ux64(v)
   685  	case OpRsh16Ux8:
   686  		return rewriteValueRISCV64_OpRsh16Ux8(v)
   687  	case OpRsh16x16:
   688  		return rewriteValueRISCV64_OpRsh16x16(v)
   689  	case OpRsh16x32:
   690  		return rewriteValueRISCV64_OpRsh16x32(v)
   691  	case OpRsh16x64:
   692  		return rewriteValueRISCV64_OpRsh16x64(v)
   693  	case OpRsh16x8:
   694  		return rewriteValueRISCV64_OpRsh16x8(v)
   695  	case OpRsh32Ux16:
   696  		return rewriteValueRISCV64_OpRsh32Ux16(v)
   697  	case OpRsh32Ux32:
   698  		return rewriteValueRISCV64_OpRsh32Ux32(v)
   699  	case OpRsh32Ux64:
   700  		return rewriteValueRISCV64_OpRsh32Ux64(v)
   701  	case OpRsh32Ux8:
   702  		return rewriteValueRISCV64_OpRsh32Ux8(v)
   703  	case OpRsh32x16:
   704  		return rewriteValueRISCV64_OpRsh32x16(v)
   705  	case OpRsh32x32:
   706  		return rewriteValueRISCV64_OpRsh32x32(v)
   707  	case OpRsh32x64:
   708  		return rewriteValueRISCV64_OpRsh32x64(v)
   709  	case OpRsh32x8:
   710  		return rewriteValueRISCV64_OpRsh32x8(v)
   711  	case OpRsh64Ux16:
   712  		return rewriteValueRISCV64_OpRsh64Ux16(v)
   713  	case OpRsh64Ux32:
   714  		return rewriteValueRISCV64_OpRsh64Ux32(v)
   715  	case OpRsh64Ux64:
   716  		return rewriteValueRISCV64_OpRsh64Ux64(v)
   717  	case OpRsh64Ux8:
   718  		return rewriteValueRISCV64_OpRsh64Ux8(v)
   719  	case OpRsh64x16:
   720  		return rewriteValueRISCV64_OpRsh64x16(v)
   721  	case OpRsh64x32:
   722  		return rewriteValueRISCV64_OpRsh64x32(v)
   723  	case OpRsh64x64:
   724  		return rewriteValueRISCV64_OpRsh64x64(v)
   725  	case OpRsh64x8:
   726  		return rewriteValueRISCV64_OpRsh64x8(v)
   727  	case OpRsh8Ux16:
   728  		return rewriteValueRISCV64_OpRsh8Ux16(v)
   729  	case OpRsh8Ux32:
   730  		return rewriteValueRISCV64_OpRsh8Ux32(v)
   731  	case OpRsh8Ux64:
   732  		return rewriteValueRISCV64_OpRsh8Ux64(v)
   733  	case OpRsh8Ux8:
   734  		return rewriteValueRISCV64_OpRsh8Ux8(v)
   735  	case OpRsh8x16:
   736  		return rewriteValueRISCV64_OpRsh8x16(v)
   737  	case OpRsh8x32:
   738  		return rewriteValueRISCV64_OpRsh8x32(v)
   739  	case OpRsh8x64:
   740  		return rewriteValueRISCV64_OpRsh8x64(v)
   741  	case OpRsh8x8:
   742  		return rewriteValueRISCV64_OpRsh8x8(v)
   743  	case OpSelect0:
   744  		return rewriteValueRISCV64_OpSelect0(v)
   745  	case OpSelect1:
   746  		return rewriteValueRISCV64_OpSelect1(v)
   747  	case OpSignExt16to32:
   748  		v.Op = OpRISCV64MOVHreg
   749  		return true
   750  	case OpSignExt16to64:
   751  		v.Op = OpRISCV64MOVHreg
   752  		return true
   753  	case OpSignExt32to64:
   754  		v.Op = OpRISCV64MOVWreg
   755  		return true
   756  	case OpSignExt8to16:
   757  		v.Op = OpRISCV64MOVBreg
   758  		return true
   759  	case OpSignExt8to32:
   760  		v.Op = OpRISCV64MOVBreg
   761  		return true
   762  	case OpSignExt8to64:
   763  		v.Op = OpRISCV64MOVBreg
   764  		return true
   765  	case OpSlicemask:
   766  		return rewriteValueRISCV64_OpSlicemask(v)
   767  	case OpSqrt:
   768  		v.Op = OpRISCV64FSQRTD
   769  		return true
   770  	case OpSqrt32:
   771  		v.Op = OpRISCV64FSQRTS
   772  		return true
   773  	case OpStaticCall:
   774  		v.Op = OpRISCV64CALLstatic
   775  		return true
   776  	case OpStore:
   777  		return rewriteValueRISCV64_OpStore(v)
   778  	case OpSub16:
   779  		v.Op = OpRISCV64SUB
   780  		return true
   781  	case OpSub32:
   782  		v.Op = OpRISCV64SUB
   783  		return true
   784  	case OpSub32F:
   785  		v.Op = OpRISCV64FSUBS
   786  		return true
   787  	case OpSub64:
   788  		v.Op = OpRISCV64SUB
   789  		return true
   790  	case OpSub64F:
   791  		v.Op = OpRISCV64FSUBD
   792  		return true
   793  	case OpSub8:
   794  		v.Op = OpRISCV64SUB
   795  		return true
   796  	case OpSubPtr:
   797  		v.Op = OpRISCV64SUB
   798  		return true
   799  	case OpTailCall:
   800  		v.Op = OpRISCV64CALLtail
   801  		return true
   802  	case OpTrunc16to8:
   803  		v.Op = OpCopy
   804  		return true
   805  	case OpTrunc32to16:
   806  		v.Op = OpCopy
   807  		return true
   808  	case OpTrunc32to8:
   809  		v.Op = OpCopy
   810  		return true
   811  	case OpTrunc64to16:
   812  		v.Op = OpCopy
   813  		return true
   814  	case OpTrunc64to32:
   815  		v.Op = OpCopy
   816  		return true
   817  	case OpTrunc64to8:
   818  		v.Op = OpCopy
   819  		return true
   820  	case OpWB:
   821  		v.Op = OpRISCV64LoweredWB
   822  		return true
   823  	case OpXor16:
   824  		v.Op = OpRISCV64XOR
   825  		return true
   826  	case OpXor32:
   827  		v.Op = OpRISCV64XOR
   828  		return true
   829  	case OpXor64:
   830  		v.Op = OpRISCV64XOR
   831  		return true
   832  	case OpXor8:
   833  		v.Op = OpRISCV64XOR
   834  		return true
   835  	case OpZero:
   836  		return rewriteValueRISCV64_OpZero(v)
   837  	case OpZeroExt16to32:
   838  		v.Op = OpRISCV64MOVHUreg
   839  		return true
   840  	case OpZeroExt16to64:
   841  		v.Op = OpRISCV64MOVHUreg
   842  		return true
   843  	case OpZeroExt32to64:
   844  		v.Op = OpRISCV64MOVWUreg
   845  		return true
   846  	case OpZeroExt8to16:
   847  		v.Op = OpRISCV64MOVBUreg
   848  		return true
   849  	case OpZeroExt8to32:
   850  		v.Op = OpRISCV64MOVBUreg
   851  		return true
   852  	case OpZeroExt8to64:
   853  		v.Op = OpRISCV64MOVBUreg
   854  		return true
   855  	}
   856  	return false
   857  }
        // rewriteValueRISCV64_OpAddr lowers a symbolic address: (Addr {sym} base)
        // becomes (MOVaddr {sym} [0] base). Generated code — the 'for' is the
        // generator's match-loop idiom; this rule always matches, so the loop
        // body returns true on its first iteration.
   858  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   859  	v_0 := v.Args[0]
   860  	// match: (Addr {sym} base)
   861  	// result: (MOVaddr {sym} [0] base)
   862  	for {
   863  		sym := auxToSym(v.Aux)
   864  		base := v_0
   865  		v.reset(OpRISCV64MOVaddr)
   866  		v.AuxInt = int32ToAuxInt(0)
   867  		v.Aux = symToAux(sym)
   868  		v.AddArg(base)
   869  		return true
   870  	}
   871  }
        // rewriteValueRISCV64_OpAtomicAnd8 lowers a byte-wide atomic AND onto the
        // 32-bit LoweredAtomicAnd32 primitive: the pointer is aligned down to a
        // 4-byte boundary (ANDI [^3]), and the operand is widened to a 32-bit
        // mask — (XORI [0xff] val') inverts the byte, SLL positions it at the
        // byte's bit offset within the word ((ptr&3)<<3), and NOT re-inverts so
        // the other three bytes of the word see all-ones and are preserved.
        // NOTE(review): the (ptr&3)<<3 placement assumes little-endian byte
        // numbering (standard for RISC-V) — semantics come from the rules file.
   872  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   873  	v_2 := v.Args[2]
   874  	v_1 := v.Args[1]
   875  	v_0 := v.Args[0]
   876  	b := v.Block
   877  	typ := &b.Func.Config.Types
   878  	// match: (AtomicAnd8 ptr val mem)
   879  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   880  	for {
   881  		ptr := v_0
   882  		val := v_1
   883  		mem := v_2
   884  		v.reset(OpRISCV64LoweredAtomicAnd32)
   885  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   886  		v0.AuxInt = int64ToAuxInt(^3)
   887  		v0.AddArg(ptr)
   888  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   889  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   890  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   891  		v3.AuxInt = int64ToAuxInt(0xff)
   892  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   893  		v4.AddArg(val)
   894  		v3.AddArg(v4)
   895  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   896  		v5.AuxInt = int64ToAuxInt(3)
   897  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   898  		v6.AuxInt = int64ToAuxInt(3)
   899  		v6.AddArg(ptr)
   900  		v5.AddArg(v6)
   901  		v2.AddArg2(v3, v5)
   902  		v1.AddArg(v2)
   903  		v.AddArg3(v0, v1, mem)
   904  		return true
   905  	}
   906  }
        // rewriteValueRISCV64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to
        // LoweredAtomicCas32, sign-extending the 'old' comparison operand to 64
        // bits. NOTE(review): presumably this matches the sign-extended value
        // produced by a 32-bit load on RV64 (LR.W sign-extends) so the register
        // compare is exact — confirm against the comment in _gen/RISCV64.rules.
   907  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   908  	v_3 := v.Args[3]
   909  	v_2 := v.Args[2]
   910  	v_1 := v.Args[1]
   911  	v_0 := v.Args[0]
   912  	b := v.Block
   913  	typ := &b.Func.Config.Types
   914  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   915  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   916  	for {
   917  		ptr := v_0
   918  		old := v_1
   919  		new := v_2
   920  		mem := v_3
   921  		v.reset(OpRISCV64LoweredAtomicCas32)
   922  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   923  		v0.AddArg(old)
   924  		v.AddArg4(ptr, v0, new, mem)
   925  		return true
   926  	}
   927  }
        // rewriteValueRISCV64_OpAtomicOr8 lowers a byte-wide atomic OR onto the
        // 32-bit LoweredAtomicOr32 primitive: the pointer is aligned down to a
        // 4-byte boundary (ANDI [^3]) and the zero-extended byte is shifted to
        // its bit position within the word ((ptr&3)<<3). Unlike AtomicAnd8, no
        // mask inversion is needed — ORing zeros leaves the other bytes intact.
        // NOTE(review): byte placement assumes little-endian ordering (standard
        // for RISC-V); semantics come from the rules file.
   928  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   929  	v_2 := v.Args[2]
   930  	v_1 := v.Args[1]
   931  	v_0 := v.Args[0]
   932  	b := v.Block
   933  	typ := &b.Func.Config.Types
   934  	// match: (AtomicOr8 ptr val mem)
   935  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   936  	for {
   937  		ptr := v_0
   938  		val := v_1
   939  		mem := v_2
   940  		v.reset(OpRISCV64LoweredAtomicOr32)
   941  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   942  		v0.AuxInt = int64ToAuxInt(^3)
   943  		v0.AddArg(ptr)
   944  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   945  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   946  		v2.AddArg(val)
   947  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   948  		v3.AuxInt = int64ToAuxInt(3)
   949  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   950  		v4.AuxInt = int64ToAuxInt(3)
   951  		v4.AddArg(ptr)
   952  		v3.AddArg(v4)
   953  		v1.AddArg2(v2, v3)
   954  		v.AddArg3(v0, v1, mem)
   955  		return true
   956  	}
   957  }
// rewriteValueRISCV64_OpAvg64u lowers the unsigned 64-bit average.
// (x+y)/2 is computed without intermediate overflow as
// (x>>1) + (y>>1) + (x&y&1); the final term restores the carry bit that
// is lost by shifting both operands right before adding.
func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v1.AuxInt = int64ToAuxInt(1)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v2.AuxInt = int64ToAuxInt(1)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
		v3.AuxInt = int64ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
		v4.AddArg2(x, y)
		v3.AddArg(v4)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueRISCV64_OpBitLen16 widens the operand to 64 bits via zero
// extension and defers to the BitLen64 lowering below.
func rewriteValueRISCV64_OpBitLen16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen16 x)
	// result: (BitLen64 (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpBitLen32 lowers BitLen32 as 32 minus the 32-bit
// count of leading zeros (CLZW).
func rewriteValueRISCV64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 <t> x)
	// result: (SUB (MOVDconst [32]) (CLZW <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpRISCV64CLZW, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpBitLen64 lowers BitLen64 as 64 minus the 64-bit
// count of leading zeros (CLZ).
func rewriteValueRISCV64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen64 <t> x)
	// result: (SUB (MOVDconst [64]) (CLZ <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpRISCV64CLZ, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpBitLen8 widens the operand to 64 bits via zero
// extension and defers to the BitLen64 lowering above.
func rewriteValueRISCV64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpBswap16 byte-swaps a 16-bit value: REV8 reverses
// all eight bytes of the register, then SRLI [48] moves the two bytes of
// interest from the top of the register back down to the low half.
func rewriteValueRISCV64_OpBswap16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Bswap16 <t> x)
	// result: (SRLI [48] (REV8 <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48)
		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpBswap32 byte-swaps a 32-bit value: REV8 reverses
// all eight bytes, then SRLI [32] shifts the four reversed bytes down
// into the low half of the register.
func rewriteValueRISCV64_OpBswap32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Bswap32 <t> x)
	// result: (SRLI [32] (REV8 <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64REV8, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst16 lowers a 16-bit constant to a MOVDconst,
// widening the aux value to int64 (all RISCV64 constants are materialized
// as full-width MOVDconst).
func rewriteValueRISCV64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueRISCV64_OpConst32 lowers a 32-bit constant to a MOVDconst.
func rewriteValueRISCV64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueRISCV64_OpConst64 lowers a 64-bit constant to a MOVDconst.
func rewriteValueRISCV64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueRISCV64_OpConst8 lowers an 8-bit constant to a MOVDconst.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueRISCV64_OpConstBool lowers a boolean constant to a
// MOVDconst of 0 or 1 (via b2i).
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}

// rewriteValueRISCV64_OpConstNil lowers the nil pointer constant to a
// zero MOVDconst.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpCtz16 counts trailing zeros of a 16-bit value.
// ORI sets bit 16 as a sentinel so that a zero input yields 16 from CTZW
// (the correct Ctz16 result for 0) rather than the count of the wider
// register.
func rewriteValueRISCV64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<16] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpCtz8 counts trailing zeros of an 8-bit value.
// ORI sets bit 8 as a sentinel so that a zero input yields 8 from CTZW.
func rewriteValueRISCV64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZW (ORI <typ.UInt32> [1<<8] x))
	for {
		x := v_0
		v.reset(OpRISCV64CTZW)
		v0 := b.NewValue0(v.Pos, OpRISCV64ORI, typ.UInt32)
		v0.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers signed 16-bit division by
// sign-extending both operands to 32 bits and using DIVW. The rule only
// matches when the op's boolean AuxInt is false (the [false] in the rule);
// otherwise it returns false and leaves the value for another pass.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}

// rewriteValueRISCV64_OpDiv16u lowers unsigned 16-bit division by
// zero-extending both operands to 32 bits and using DIVUW.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpDiv32 lowers signed 32-bit division directly to
// DIVW; like Div16, it only matches when the boolean AuxInt is false.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueRISCV64_OpDiv64 lowers signed 64-bit division directly to
// DIV; it only matches when the boolean AuxInt is false.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}

// rewriteValueRISCV64_OpDiv8 lowers signed 8-bit division by
// sign-extending both operands to 32 bits and using DIVW.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpDiv8u lowers unsigned 8-bit division by
// zero-extending both operands to 32 bits and using DIVUW.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers 16-bit equality as SEQZ(SUB(...)):
// the difference of the zero-extended operands is zero iff they are equal.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpEq32 lowers 32-bit equality as SEQZ(SUB(...)).
// Eq32 is commutative, so the inner _i0 loop tries both argument orders.
// The extension op (sign vs zero) is chosen to match the operand type's
// signedness; either is fine for equality as long as both operands use
// the same extension.
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}

// rewriteValueRISCV64_OpEq64 lowers 64-bit equality as SEQZ(SUB(x, y));
// full-width operands need no extension.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpEq8 lowers 8-bit equality as SEQZ(SUB(...)) over
// zero-extended operands.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpEqB lowers boolean equality as SEQZ(SUB(x, y));
// booleans are 0 or 1, so no extension is needed.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpEqPtr lowers pointer equality as SEQZ(SUB(x, y))
// over full-width uintptr operands.
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 computes the high 32 bits of the signed
// 32x32 product: both operands are sign-extended to 64 bits, multiplied
// with MUL, and the upper half is extracted with an arithmetic shift
// (SRAI [32]).
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpHmul32u computes the high 32 bits of the unsigned
// 32x32 product: zero-extended operands, MUL, then a logical shift
// (SRLI [32]) to extract the upper half.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// The Leq* rewrites below all use the same identity: x <= y is !(y < x).
// Each one flips the operands and wraps the corresponding Less* op in Not,
// leaving the Less* lowering (further down in this file) to pick the
// correct comparison instruction.

// rewriteValueRISCV64_OpLeq16 rewrites signed 16-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq16U rewrites unsigned 16-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq32 rewrites signed 32-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq32U rewrites unsigned 32-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq64 rewrites signed 64-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq64U rewrites unsigned 64-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq8 rewrites signed 8-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueRISCV64_OpLeq8U rewrites unsigned 8-bit <= as !(y < x).
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// The Less* rewrites below lower sub-word comparisons to SLT (signed) or
// SLTU (unsigned), first widening both operands to 64 bits with the
// matching extension so the native full-width compare gives the correct
// sub-word answer.

// rewriteValueRISCV64_OpLess16 lowers signed 16-bit < via SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpLess16U lowers unsigned 16-bit < via SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpLess32 lowers signed 32-bit < via SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpLess32U lowers unsigned 32-bit < via SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpLess8 lowers signed 8-bit < via SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueRISCV64_OpLess8U lowers unsigned 8-bit < via SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad selects the machine load op by the loaded
// type: booleans and unsigned 8-bit ints use MOVBUload; signed 8/16/32-bit
// ints use the sign-extending MOVB/MOVH/MOVWload; unsigned 16/32-bit ints
// use MOVHU/MOVWUload; 64-bit ints and pointers use MOVDload; and floats
// use FMOVW/FMOVDload. Returns false if the type matches none of the
// cases (left for other rewrites or an error downstream).
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers the address of a stack object.
// When the object's element type contains pointers, the base is threaded
// through SPanchored together with the memory argument — presumably so
// the address computation stays ordered with respect to memory operations
// (confirm against the generic SPanchored op's documentation). Otherwise
// a plain MOVaddr of the base suffices and the memory argument is dropped.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
  1911  func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
  1912  	v_1 := v.Args[1]
  1913  	v_0 := v.Args[0]
  1914  	b := v.Block
  1915  	typ := &b.Func.Config.Types
  1916  	// match: (Lsh16x16 <t> x y)
  1917  	// cond: !shiftIsBounded(v)
  1918  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
  1919  	for {
  1920  		t := v.Type
  1921  		x := v_0
  1922  		y := v_1
  1923  		if !(!shiftIsBounded(v)) {
  1924  			break
  1925  		}
  1926  		v.reset(OpRISCV64AND)
  1927  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1928  		v0.AddArg2(x, y)
  1929  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1930  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1931  		v2.AuxInt = int64ToAuxInt(64)
  1932  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1933  		v3.AddArg(y)
  1934  		v2.AddArg(v3)
  1935  		v1.AddArg(v2)
  1936  		v.AddArg2(v0, v1)
  1937  		return true
  1938  	}
  1939  	// match: (Lsh16x16 x y)
  1940  	// cond: shiftIsBounded(v)
  1941  	// result: (SLL x y)
  1942  	for {
  1943  		x := v_0
  1944  		y := v_1
  1945  		if !(shiftIsBounded(v)) {
  1946  			break
  1947  		}
  1948  		v.reset(OpRISCV64SLL)
  1949  		v.AddArg2(x, y)
  1950  		return true
  1951  	}
  1952  	return false
  1953  }
  1954  func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
  1955  	v_1 := v.Args[1]
  1956  	v_0 := v.Args[0]
  1957  	b := v.Block
  1958  	typ := &b.Func.Config.Types
  1959  	// match: (Lsh16x32 <t> x y)
  1960  	// cond: !shiftIsBounded(v)
  1961  	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
  1962  	for {
  1963  		t := v.Type
  1964  		x := v_0
  1965  		y := v_1
  1966  		if !(!shiftIsBounded(v)) {
  1967  			break
  1968  		}
  1969  		v.reset(OpRISCV64AND)
  1970  		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
  1971  		v0.AddArg2(x, y)
  1972  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  1973  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
  1974  		v2.AuxInt = int64ToAuxInt(64)
  1975  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1976  		v3.AddArg(y)
  1977  		v2.AddArg(v3)
  1978  		v1.AddArg(v2)
  1979  		v.AddArg2(v0, v1)
  1980  		return true
  1981  	}
  1982  	// match: (Lsh16x32 x y)
  1983  	// cond: shiftIsBounded(v)
  1984  	// result: (SLL x y)
  1985  	for {
  1986  		x := v_0
  1987  		y := v_1
  1988  		if !(shiftIsBounded(v)) {
  1989  			break
  1990  		}
  1991  		v.reset(OpRISCV64SLL)
  1992  		v.AddArg2(x, y)
  1993  		return true
  1994  	}
  1995  	return false
  1996  }
// rewriteValueRISCV64_OpLsh16x64 lowers Lsh16x64 (16-bit left shift by a
// 64-bit amount). When the shift may be out of range (!shiftIsBounded), the
// SLL result is ANDed with a mask built from (SLTIU [64] y): all ones when
// y < 64, zero otherwise, so oversized shifts yield 0 as Go requires.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers Lsh16x8 (16-bit left shift by an
// 8-bit amount). The shift amount is zero-extended to 64 bits before the
// SLTIU [64] range check; out-of-range shifts are masked to 0 via the
// negated comparison result. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers Lsh32x16 (32-bit left shift by a
// 16-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers Lsh32x32 (32-bit left shift by a
// 32-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers Lsh32x64 (32-bit left shift by a
// 64-bit amount). No zero-extension is needed; (SLTIU [64] y) yields 1 when
// y < 64 and its negation masks the SLL result to 0 for oversized shifts.
// Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers Lsh32x8 (32-bit left shift by an
// 8-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers Lsh64x16 (64-bit left shift by a
// 16-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers Lsh64x32 (64-bit left shift by a
// 32-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers Lsh64x64 (64-bit left shift by a
// 64-bit amount). No zero-extension is needed; (SLTIU [64] y) yields 1 when
// y < 64 and its negation masks the SLL result to 0 for oversized shifts.
// Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers Lsh64x8 (64-bit left shift by an
// 8-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers Lsh8x16 (8-bit left shift by a
// 16-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers Lsh8x32 (8-bit left shift by a
// 32-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers Lsh8x64 (8-bit left shift by a
// 64-bit amount). No zero-extension is needed; (SLTIU [64] y) yields 1 when
// y < 64 and its negation masks the SLL result to 0 for oversized shifts.
// Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers Lsh8x8 (8-bit left shift by an
// 8-bit amount). The amount is zero-extended to 64 bits for the SLTIU [64]
// range check; the negated check masks the SLL result to 0 when the shift
// amount is >= 64. Bounded shifts lower to a bare SLL.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64 lowers Max64 to the single MAX instruction,
// but only when buildcfg.GORISCV64 >= 22 (the rva22 profile, which includes
// the Zbb extension providing MAX). Otherwise it returns false and the op
// is lowered elsewhere.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMax64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAX x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAX)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64u lowers Max64u (unsigned 64-bit max) to the
// MAXU instruction when buildcfg.GORISCV64 >= 22 (rva22 profile with Zbb).
// Otherwise it returns false and the op is lowered elsewhere.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMax64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAXU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAXU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64 lowers Min64 to the MIN instruction when
// buildcfg.GORISCV64 >= 22 (rva22 profile with Zbb). Otherwise it returns
// false and the op is lowered elsewhere.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMin64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MIN x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MIN)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64u lowers Min64u (unsigned 64-bit min) to the
// MINU instruction when buildcfg.GORISCV64 >= 22 (rva22 profile with Zbb).
// Otherwise it returns false and the op is lowered elsewhere.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMin64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MINU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MINU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers signed 16-bit remainder: both operands
// are sign-extended to 32 bits and REMW computes the result. The rule only
// fires when the op's bool aux is false (presumably marking that no
// overflow fixup for minInt % -1 is required — confirm against genericOps).
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers unsigned 16-bit remainder: both
// operands are zero-extended to 32 bits and REMUW computes the result.
// This rule is unconditional, so the function always returns true.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers signed 32-bit remainder directly to
// REMW. The rule only fires when the op's bool aux is false (presumably
// marking that no minInt % -1 fixup is needed — confirm against genericOps).
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers signed 64-bit remainder directly to
// REM. The rule only fires when the op's bool aux is false (presumably
// marking that no minInt % -1 fixup is needed — confirm against genericOps).
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers signed 8-bit remainder: both operands
// are sign-extended to 32 bits and REMW computes the result. This rule is
// unconditional, so the function always returns true.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers unsigned 8-bit remainder: both
// operands are zero-extended to 32 bits and REMUW computes the result.
// This rule is unconditional, so the function always returns true.
// Generated from _gen/RISCV64.rules; edit the rules file, not this code.
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMove lowers the generic Move (memmove of a fixed
// size) to RISCV64 loads/stores. Rules are tried in order: small sizes
// (0,1,2,3,4,6,8) are expanded inline, preferring the widest load/store the
// type's alignment permits and falling back to narrower chained accesses;
// anything larger becomes LoweredMove (up to 3*8*moveSize bytes) or
// LoweredMoveLoop. The aux type t carries the alignment; v_0/v_1/v_2 are
// dst, src, mem. Generated from _gen/RISCV64.rules; edit that file instead.
func rewriteValueRISCV64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 0 && s <= 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)
	// result: (LoweredMove [makeValAndOff(int32(s),int32(t.Alignment()))] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 0 && s <= 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64LoweredMove)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)
	// result: (LoweredMoveLoop [makeValAndOff(int32(s),int32(t.Alignment()))] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 3*8*moveSize(t.Alignment(), config) && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64LoweredMoveLoop)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpNeq16 lowers the generic Neq16 op to (Not (Eq16 x y)).
// The rule is unconditional, so the one-shot match loop always returns true.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers the generic Neq32 op to (Not (Eq32 x y)).
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers the generic Neq64 op to (Not (Eq64 x y)).
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers the generic Neq8 op to (Not (Eq8 x y)).
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers boolean inequality to a subtraction
// followed by a set-if-nonzero: (SNEZ (SUB <typ.Bool> x y)).
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers pointer inequality to (Not (EqPtr x y)).
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers OffPtr. Rules are tried in order:
// an SP-relative offset that fits in 32 bits becomes MOVaddr, any other
// 32-bit offset becomes an ADDI immediate, and the fallback materializes
// the offset with MOVDconst and adds it with ADD.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPopCount16 lowers PopCount16 to CPOP on the
// zero-extended 64-bit value; the extension keeps the upper bits zero so
// they do not contribute to the count.
func rewriteValueRISCV64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 x)
	// result: (CPOP (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpRISCV64CPOP)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpPopCount8 lowers PopCount8 to CPOP on the
// zero-extended 64-bit value.
func rewriteValueRISCV64_OpPopCount8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount8 x)
	// result: (CPOP (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpRISCV64CPOP)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpRISCV64ADD simplifies ADD: a 32-bit non-pointer
// constant operand folds into ADDI, ADD of a NEG becomes SUB, and when
// buildcfg.GORISCV64 >= 22 a shifted-by-1/2/3 operand fuses into
// SH1ADD/SH2ADD/SH3ADD.
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		// _i0 tries both argument orders because ADD is commutative.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD x (NEG y))
	// result: (SUB x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64NEG {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpRISCV64SUB)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [1] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH1ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH1ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [2] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH2ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH2ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [3] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH3ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH3ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies ADDI: it folds the immediate
// into a MOVaddr offset, elides a zero add, collapses ADDI of a constant
// into MOVDconst, and merges nested ADDIs when the combined immediate
// still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		// Adding zero is the identity; make v an alias of x.
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND simplifies AND: a 32-bit constant
// operand folds into ANDI, and (AND x x) reduces to x.
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		// _i0 tries both argument orders because AND is commutative.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI simplifies ANDI: masking with 0 yields
// the constant 0, masking with -1 (all bits set) is the identity, a
// constant operand folds completely, and nested ANDIs merge by
// intersecting their masks.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double add with an inner
// multiply into FMADDD, but only when the function's FMA policy
// (useFMA) permits it.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		// _i0 tries both argument orders because FADDD is commutative.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add with an
// inner multiply into FMADDS, gated on the function's FMA policy (useFMA).
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FEQD rewrites equality comparisons against
// the infinity constants as FCLASSD class-bit tests (masking the class
// result with ANDI), avoiding a floating-point compare.
func rewriteValueRISCV64_OpRISCV64FEQD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FEQD x (FMOVDconst [math.Inf(-1)]))
	// result: (ANDI [0b00_0000_0001] (FCLASSD x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(-1) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(0b00_0000_0001)
			v0 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v0.AddArg(x)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (FEQD x (FMOVDconst [math.Inf(1)]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(1) {
				continue
			}
			v.reset(OpRISCV64SNEZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FLED rewrites <=-comparisons against the
// extreme finite constants (±math.MaxFloat64, ±0x1p-1022) as FCLASSD
// class-mask tests. These rules are not commutative: the constant's side
// of the comparison determines which class bits are selected.
func rewriteValueRISCV64_OpRISCV64FLED(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FLED (FMOVDconst [-math.MaxFloat64]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1111_1110] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != -math.MaxFloat64 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1111_1110)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED x (FMOVDconst [math.MaxFloat64]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0111_1111] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.MaxFloat64 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0111_1111)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED (FMOVDconst [+0x1p-1022]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1100_0000] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != +0x1p-1022 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1100_0000)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLED x (FMOVDconst [-0x1p-1022]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0000_0011] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != -0x1p-1022 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0000_0011)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FLTD rewrites <-comparisons against the
// extreme finite constants (±math.MaxFloat64, ±0x1p-1022) as FCLASSD
// class-mask tests. As with FLED, argument order matters, so the rules
// do not iterate both operand orders.
func rewriteValueRISCV64_OpRISCV64FLTD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FLTD x (FMOVDconst [-math.MaxFloat64]))
	// result: (ANDI [0b00_0000_0001] (FCLASSD x))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != -math.MaxFloat64 {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(0b00_0000_0001)
		v0 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD (FMOVDconst [math.MaxFloat64]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != math.MaxFloat64 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD x (FMOVDconst [+0x1p-1022]))
	// result: (SNEZ (ANDI <typ.Int64> [0b00_0011_1111] (FCLASSD x)))
	for {
		x := v_0
		if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != +0x1p-1022 {
			break
		}
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_0011_1111)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (FLTD (FMOVDconst [-0x1p-1022]) x)
	// result: (SNEZ (ANDI <typ.Int64> [0b00_1111_1100] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != -0x1p-1022 {
			break
		}
		x := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(0b00_1111_1100)
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD folds single-use FNEGD operands of
// FMADDD into the negating fused forms: a negated multiplicand gives
// FNMSUBD, a negated addend gives FMSUBD. The neg.Uses == 1 condition
// ensures the negation is consumed only here and can be absorbed.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		// Only the two multiplicands commute, so _i0 swaps v_0/v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS is the single-precision analogue of
// the FMADDD rules: a single-use negated multiplicand becomes FNMSUBS and
// a single-use negated addend becomes FMSUBS.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVDload folds address arithmetic into the
// load: a MOVaddr base merges its offset and symbol (except SB-relative in
// dynlink mode), an ADDI base merges its immediate, and a load of a value
// just stored to the same address via MOVDstore becomes a register move
// (FMVDX) instead of a round trip through memory.
func rewriteValueRISCV64_OpRISCV64FMOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVDload [off] {sym} ptr1 (MOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVDX x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVDX)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVDstore folds address arithmetic into the
// store: a MOVaddr base merges its offset and symbol (except SB-relative
// in dynlink mode) and an ADDI base merges its immediate, as long as the
// combined offset stays within 32 bits.
func rewriteValueRISCV64_OpRISCV64FMOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVWload mirrors the FMOVDload rules for
// the single-precision load: it folds MOVaddr/ADDI address arithmetic into
// the offset, and converts a load of a value just stored via MOVWstore to
// the same address into a register move (FMVSX).
func rewriteValueRISCV64_OpRISCV64FMOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (FMOVWload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVSX x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVSX)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMOVWstore mirrors the FMOVDstore rules for
// the single-precision store: MOVaddr and ADDI bases fold their offsets
// (and mergeable symbols) into the store when the sum fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64FMOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (FMOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (FMOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (FMOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (FMOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD absorbs a single-use FNEGD on one of the
// FMSUBD operands into the fused op itself: negating a multiplicand turns it
// into FNMADDD, negating the addend turns it into FMADDD.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		// The multiply operands commute, so try the FNEGD in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS is the single-precision analogue of the
// FMSUBD rules: a single-use FNEGS on a multiplicand yields FNMADDS, and on
// the addend yields FMADDS.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		// The multiply operands commute, so try the FNEGS in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNED lowers a float64 inequality comparison
// against an infinity constant to an FCLASSD bit test: FCLASSD sets bit 0 for
// -Inf and bit 7 for +Inf, so "x != Inf" becomes SEQZ of the masked class bits.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FNED(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (FNED x (FMOVDconst [math.Inf(-1)]))
	// result: (SEQZ (ANDI <typ.Int64> [0b00_0000_0001] (FCLASSD x)))
	for {
		// FNED commutes, so try the constant in both argument positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(-1) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_0000_0001)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (FNED x (FMOVDconst [math.Inf(1)]))
	// result: (SEQZ (ANDI <typ.Int64> [0b00_1000_0000] (FCLASSD x)))
	for {
		// FNED commutes, so try the constant in both argument positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpRISCV64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != math.Inf(1) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
			v0.AuxInt = int64ToAuxInt(0b00_1000_0000)
			v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
			v1.AddArg(x)
			v0.AddArg(v1)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD absorbs a single-use FNEGD operand into
// the fused op: a negated multiplicand turns FNMADDD into FMSUBD, a negated
// addend turns it into FNMSUBD.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		// The multiply operands commute, so try the FNEGD in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS is the single-precision analogue of the
// FNMADDD rules: a single-use FNEGS on a multiplicand yields FMSUBS, and on
// the addend yields FNMSUBS.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		// The multiply operands commute, so try the FNEGS in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD absorbs a single-use FNEGD operand into
// the fused op: a negated multiplicand turns FNMSUBD into FMADDD, a negated
// addend turns it into FNMADDD.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		// The multiply operands commute, so try the FNEGD in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS is the single-precision analogue of the
// FNMSUBD rules: a single-use FNEGS on a multiplicand yields FMADDS, and on
// the addend yields FNMADDS.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		// The multiply operands commute, so try the FNEGS in both positions.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD fuses an FMULD feeding either side of a
// float64 subtraction into a single fused multiply-add instruction
// (a - x*y -> FNMSUBD, x*y - a -> FMSUBD), gated by the function's FMA policy.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS is the single-precision analogue of the
// FSUBD rules: a - x*y -> FNMSUBS and x*y - a -> FMSUBS, gated by the
// function's FMA policy.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR folds a constant register
// operand of a bounds-check panic into its aux, turning a constant/register
// form into the fully-constant LoweredPanicBoundsCC form.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpRISCV64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC is the mirror of the CR
// rule: when the register operand is a MOVDconst, fold it into the aux as Cx
// (the aux constant becomes Cy), producing LoweredPanicBoundsCC.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpRISCV64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR folds a MOVDconst in
// either operand position of a register/register bounds-check panic into the
// aux, reducing it to the RC or CR form (which may later fold to CC).
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVDconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpRISCV64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVDconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload folds MOVaddr/ADDI address arithmetic
// into the load's offset, and forwards a just-stored byte directly from the
// stored value (store-to-load forwarding via MOVBUreg) when the load reads the
// same address through the same store.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr1 (MOVBstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVBUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVBUreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUreg eliminates redundant byte
// zero-extensions. The extension is dropped (copyOf) when the input already
// fits in an unsigned byte: float comparison results, SEQZ/SNEZ/SLT/SLTU, and
// small non-negative ANDI masks. It constant-folds MOVDconst, turns
// already-zero-extended inputs (MOVBUload, 8/32/64-bit atomic results, nested
// MOVBUreg) into a plain MOVDreg, and converts a single-use signed MOVBload
// feeding the zero-extension into a MOVBUload in the load's block.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBUreg x:(FLES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SEQZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SEQZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SNEZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SNEZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint8(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Build the replacement load in the original load's block so the
		// memory argument stays in scope.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBload folds MOVaddr/ADDI address arithmetic
// into the signed byte load's offset, and forwards a just-stored byte from the
// stored value (via MOVBreg sign-extension) when the load reads the same
// address through the same store.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr1 (MOVBstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVBreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVBreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBreg eliminates redundant byte
// sign-extensions: drops the extension when a non-negative ANDI mask already
// bounds the value within int8, constant-folds MOVDconst, turns
// already-sign-extended inputs (MOVBload, nested MOVBreg) into MOVDreg, and
// converts a single-use unsigned MOVBUload feeding the sign-extension into a
// signed MOVBload built in the load's block.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Build the replacement load in the original load's block so the
		// memory argument stays in scope.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstore folds MOVaddr/ADDI address arithmetic
// into the byte store's offset, converts a store of constant zero into
// MOVBstorezero, and strips any sign/zero extension (MOVBreg, MOVHreg,
// MOVWreg, MOVBUreg, MOVHUreg, MOVWUreg) from the stored value, since a byte
// store only writes the low 8 bits anyway.
// Generated from _gen/RISCV64.rules; change the rules file, not this code.
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstorezero rewrites a MOVBstorezero value,
// folding constant address arithmetic (MOVaddr symbols and ADDI immediates)
// into the store's offset/symbol aux fields. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDload rewrites a MOVDload value: it folds
// address arithmetic (MOVaddr, ADDI) into the load offset and performs
// store-to-load forwarding when the load reads back a just-stored value at
// the same [off]{sym} address. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr1 (MOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVDreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVDload [off] {sym} ptr1 (FMOVDstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVXD x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64FMOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVXD)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDnop rewrites a MOVDnop value: a no-op move
// of a constant collapses to the constant itself. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDreg rewrites a MOVDreg value: when its
// argument has exactly one use, the register move is demoted to a MOVDnop.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstore rewrites a MOVDstore value: it folds
// address arithmetic (MOVaddr, ADDI) into the store offset and turns a store
// of constant zero into MOVDstorezero. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVDstorezero rewrites a MOVDstorezero value,
// folding constant address arithmetic (MOVaddr symbols and ADDI immediates)
// into the store's offset/symbol aux fields. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUload rewrites a MOVHUload value: it folds
// address arithmetic (MOVaddr, ADDI) into the load offset and performs
// store-to-load forwarding from a MOVHstore at the same [off]{sym} address.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr1 (MOVHstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVHUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVHUreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHUreg rewrites a MOVHUreg (zero-extend
// 16->64 bit) value: it elides the extension when the argument is already
// known to fit in 16 unsigned bits (masked by ANDI, a constant, a narrower
// unsigned load, or a prior zero-extension), and converts a signed MOVHload
// feeding only this extension into an unsigned MOVHUload in the load's block.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint16(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHload rewrites a MOVHload value: it folds
// address arithmetic (MOVaddr, ADDI) into the load offset and performs
// store-to-load forwarding from a MOVHstore at the same [off]{sym} address.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr1 (MOVHstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVHreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVHreg)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHreg rewrites a MOVHreg (sign-extend
// 16->64 bit) value: it elides the extension when the argument is already
// known to fit in 16 signed bits (small non-negative ANDI mask, a constant,
// a narrower load, or a prior extension), and converts a MOVHUload feeding
// only this extension into a signed MOVHload in the load's block.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore rewrites a MOVHstore value: it folds
// address arithmetic (MOVaddr, ADDI) into the store offset, turns a store of
// constant zero into MOVHstorezero, and drops redundant sign/zero extensions
// of the stored value (only the low 16 bits are written anyway).
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstorezero rewrites a MOVHstorezero value,
// folding constant address arithmetic (MOVaddr symbols and ADDI immediates)
// into the store's offset/symbol aux fields. Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUload rewrites a MOVWUload value: it folds
// address arithmetic (MOVaddr, ADDI) into the load offset and performs
// store-to-load forwarding, turning a reload of a just-stored MOVWstore value
// into a zero-extension, and a reload of an FMOVWstore value into a
// float-to-int register move (FMVXS) wrapped in a zero-extension.
// Reports whether v was rewritten.
// NOTE(review): generated from _gen/RISCV64.rules — edit the rules, not this file.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVWUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVWUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVWUreg (FMVXS x))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVWUreg)
		v0 := b.NewValue0(v_1.Pos, OpRISCV64FMVXS, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg applies the generated rewrite rules for
// MOVWUreg (zero-extension of the low 32 bits). Each `for { ... }` block below
// implements one rule from _gen/RISCV64.rules: it matches the pattern, checks the
// condition, and rewrites v in place, returning true. `break` abandons the rule
// and falls through to the next one. NOTE(generated): do not hand-edit this code;
// change the .rules file and regenerate instead.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (MOVWUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (ANDI [c] x))
	// cond: c < 0
	// result: (AND (MOVDconst [int64(uint32(c))]) x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(v0, x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The `@x.Block` form: the replacement load is built in x's block,
		// not v's, so it stays where the original load executed.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload applies the generated rewrite rules for
// MOVWload: offset/symbol folding into the load's auxint/aux, and load-after-store
// forwarding. Each `for { ... }` block is one rule from _gen/RISCV64.rules; it
// returns true when v is rewritten in place. NOTE(generated): do not hand-edit;
// change the .rules file and regenerate.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr1 (MOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (MOVWreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64MOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWload [off] {sym} ptr1 (FMOVWstore [off] {sym} ptr2 x _))
	// cond: isSamePtr(ptr1, ptr2)
	// result: (FMVXS x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr1 := v_0
		if v_1.Op != OpRISCV64FMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(isSamePtr(ptr1, ptr2)) {
			break
		}
		v.reset(OpRISCV64FMVXS)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg applies the generated rewrite rules for
// MOVWreg (sign-extension of the low 32 bits). Most rules below drop the
// extension as a no-op (rewriting to MOVDreg) when the argument is an op that
// already produces a sign-extended 32-bit result. Each `for { ... }` block is
// one rule from _gen/RISCV64.rules; it returns true when v is rewritten.
// NOTE(generated): do not hand-edit; change the .rules file and regenerate.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg (NEG x))
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ADDIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ADDIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(SUBW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64SUBW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(NEGW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64NEGW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MULW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MULW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ROLW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ROLW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The `@x.Block` form: the replacement load is built in x's block,
		// not v's, so it stays where the original load executed.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore applies the generated rewrite rules for
// MOVWstore: offset/symbol folding into the store, storing a zero constant as
// MOVWstorezero, and dropping redundant sign/zero extensions of the stored value
// (only the low 32 bits are written, so the extension is irrelevant). Each
// `for { ... }` block is one rule from _gen/RISCV64.rules; it returns true when
// v is rewritten. NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero applies the generated rewrite rules
// for MOVWstorezero: folding a MOVaddr symbol+offset or an ADDI offset into the
// store's auxint/aux. Each `for { ... }` block is one rule from _gen/RISCV64.rules;
// it returns true when v is rewritten. NOTE(generated): do not hand-edit;
// regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (base.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEG applies the generated rewrite rules for NEG:
// pushing the negation through SUB (swapping operands), through ADDI (negating
// the immediate), cancelling double negation, and folding constants. Each
// `for { ... }` block is one rule from _gen/RISCV64.rules; it returns true when
// v is rewritten. NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEG (SUB x y))
	// result: (SUB y x)
	for {
		if v_0.Op != OpRISCV64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpRISCV64SUB)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] (SUB <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64SUB {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (NEG x)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64NEG {
			break
		}
		x := s_0.Args[0]
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (NEG (MOVDconst [x]))
	// result: (MOVDconst [-x])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEGW applies the generated rewrite rule for NEGW:
// constant-folding the 32-bit negation (negate, then sign-extend the low 32 bits).
// Returns true when v is rewritten. NOTE(generated): do not hand-edit; regenerate
// from the .rules file.
func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGW (MOVDconst [x]))
	// result: (MOVDconst [int64(int32(-x))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64OR applies the generated rewrite rules for OR:
// folding a 32-bit constant operand into an ORI immediate (trying both argument
// orders, since OR is commutative — hence the inner `_i0` loop that swaps
// v_0/v_1), and simplifying (OR x x) to x. Returns true when v is rewritten.
// NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORI applies the generated rewrite rules for ORI:
// identity (ORI [0]), saturation (ORI [-1] is all ones), constant folding, and
// collapsing nested ORIs by OR-ing their immediates. Returns true when v is
// rewritten. NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORI [-1] x)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORI [x] (MOVDconst [y]))
	// result: (MOVDconst [x | y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x | y)
		return true
	}
	// match: (ORI [x] (ORI [y] z))
	// result: (ORI [x | y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ORI)
		v.AuxInt = int64ToAuxInt(x | y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORN applies the generated rewrite rule for ORN:
// (ORN x x), i.e. x | ^x, is always all ones, so it folds to MOVDconst [-1].
// Returns true when v is rewritten. NOTE(generated): do not hand-edit;
// regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x x)
	// result: (MOVDconst [-1])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROL applies the generated rewrite rules for ROL
// (64-bit rotate left): a constant rotate amount becomes an immediate rotate
// right by the complementary amount (RORI [-val&63]), and rotating left by a
// negated amount becomes a rotate right. Returns true when v is rewritten.
// NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROL x (MOVDconst [val]))
	// result: (RORI [-val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		v.AuxInt = int64ToAuxInt(-val & 63)
		v.AddArg(x)
		return true
	}
	// match: (ROL x (NEG y))
	// result: (ROR x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ROR)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROLW applies the generated rewrite rules for ROLW
// (32-bit rotate left): a constant rotate amount becomes an immediate rotate
// right by the complementary amount (RORIW [-val&31]), and rotating left by a
// negated amount becomes a rotate right. Returns true when v is rewritten.
// NOTE(generated): do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROLW x (MOVDconst [val]))
	// result: (RORIW [-val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		v.AuxInt = int64ToAuxInt(-val & 31)
		v.AddArg(x)
		return true
	}
	// match: (ROLW x (NEG y))
	// result: (RORW x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64RORW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROR applies the generated rewrite rule for ROR
// (64-bit rotate right): a constant rotate amount is folded into an immediate
// rotate, RORI [val&63]. Returns true when v is rewritten. NOTE(generated):
// do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [val]))
	// result: (RORI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64RORW applies the generated rewrite rule for RORW
// (32-bit rotate right): a constant rotate amount is folded into an immediate
// rotate, RORIW [val&31]. Returns true when v is rewritten. NOTE(generated):
// do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [val]))
	// result: (RORIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SEQZ applies the generated rewrite rules for SEQZ
// (set-if-equal-zero): dropping a NEG argument (negation preserves zero-ness),
// collapsing SEQZ/SNEZ compositions, and rewriting FCLASSD class-mask tests so
// the FNEGD/FABSD is applied to the mask bits instead of the float operand (the
// bit permutations on c mirror how negation/absolute-value remap the ten FCLASS
// classes). Returns true when v is rewritten. NOTE(generated): do not hand-edit;
// regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SEQZ (NEG x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SEQZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SNEZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (ANDI [c] (FCLASSD (FNEGD x))))
	// result: (SEQZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FNEGD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (SEQZ (ANDI [c] (FCLASSD (FABSD x))))
	// result: (SEQZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FABSD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLL applies the generated rewrite rule for SLL
// (64-bit shift left): a constant shift amount is folded into an immediate
// shift, SLLI [val&63]. Returns true when v is rewritten. NOTE(generated):
// do not hand-edit; regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [val]))
	// result: (SLLI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLI applies the generated rewrite rules for SLLI
// (immediate shift left): constant folding when the shifted result still fits in
// 32 bits, and turning a shifted self-addition (ADD x x == x*2) into one larger
// shift — or a zero constant when the combined shift would clear all value bits.
// Returns true when v is rewritten. NOTE(generated): do not hand-edit;
// regenerate from the .rules file.
func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLI [x] (MOVDconst [y]))
	// cond: is32Bit(y << uint32(x))
	// result: (MOVDconst [y << uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(y << uint32(x))) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(y << uint32(x))
		return true
	}
	// match: (SLLI <t> [c] (ADD x x))
	// cond: c < t.Size() * 8 - 1
	// result: (SLLI [c+1] x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADD {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
			break
		}
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(c + 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLI <t> [c] (ADD x x))
	// cond: c >= t.Size() * 8 - 1
	// result: (MOVDconst [0])
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADD {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLW folds a constant shift amount into the
// 32-bit immediate form: (SLLW x (MOVDconst [val])) => (SLLIW [val&31] x).
func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLW x (MOVDconst [val]))
	// result: (SLLIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLT simplifies signed set-less-than:
// uses the immediate form SLTI when the constant fits in 12 bits, and
// folds the self-comparison (SLT x x) to constant 0.
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLT x (MOVDconst [val]))
	// cond: is12Bit(val)
	// result: (SLTI [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is12Bit(val)) {
			break
		}
		v.reset(OpRISCV64SLTI)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTI simplifies signed set-less-than-immediate:
// constant-folds against MOVDconst, and resolves to 1 when the operand is an
// ANDI whose non-negative mask already bounds it below the immediate.
func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTI [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(int64(y) < int64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
		return true
	}
	// match: (SLTI [x] (ANDI [y] _))
	// cond: y >= 0 && int64(y) < int64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) < int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTIU simplifies unsigned set-less-than-immediate:
// constant-folds against MOVDconst, resolves to 1 when an ANDI mask bounds the
// operand below the immediate, and to 0 when an ORI guarantees the operand is
// at least the immediate.
func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTIU [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
		return true
	}
	// match: (SLTIU [x] (ANDI [y] _))
	// cond: y >= 0 && uint64(y) < uint64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) < uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTIU [x] (ORI [y] _))
	// cond: y >= 0 && uint64(y) >= uint64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) >= uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTU simplifies unsigned set-less-than:
// uses the immediate form SLTIU when the constant fits in 12 bits, and
// folds the self-comparison (SLTU x x) to constant 0.
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLTU x (MOVDconst [val]))
	// cond: is12Bit(val)
	// result: (SLTIU [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is12Bit(val)) {
			break
		}
		v.reset(OpRISCV64SLTIU)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SNEZ simplifies set-not-equal-zero: strips a
// NEG (negation preserves zero-ness), collapses nested SEQZ/SNEZ (already
// boolean-valued), and pushes FNEGD/FABSD through FCLASSD by permuting the
// tested class-mask bits instead of performing the float operation.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (ANDI [c] (FCLASSD (FNEGD x))))
	// result: (SNEZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FNEGD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		// FNEGD mirrors the FCLASSD sign-dependent bits; NaN bits (top two) are kept.
		v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (SNEZ (ANDI [c] (FCLASSD (FABSD x))))
	// result: (SNEZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)))
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRISCV64FCLASSD {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRISCV64FABSD {
			break
		}
		x := v_0_0_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Int64)
		// FABSD maps negative classes onto the positive ones; only the low nibble is mirrored.
		v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
		v1 := b.NewValue0(v.Pos, OpRISCV64FCLASSD, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA folds a constant shift amount into the
// immediate form: (SRA x (MOVDconst [val])) => (SRAI [val&63] x).
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI simplifies arithmetic-shift-right-immediate:
// combines with sign extensions (MOVWreg/MOVHreg/MOVBreg) — turning oversized
// shifts into sign-bit broadcasts — and constant-folds against MOVDconst.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW [x] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(x)
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		// Shifting a sign-extended byte right by >= 8 leaves only copies of
		// its sign bit, so broadcast bit 7 instead.
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW folds a constant shift amount into the
// 32-bit immediate form: (SRAW x (MOVDconst [val])) => (SRAIW [val&31] x).
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL folds a constant shift amount into the
// immediate form: (SRL x (MOVDconst [val])) => (SRLI [val&63] x).
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [val&63] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(val & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI simplifies logical-shift-right-immediate:
// combines with zero extensions (MOVWUreg/MOVHUreg/MOVBUreg) — producing 0
// when the shift discards every significant bit — and constant-folds against
// MOVDconst.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW [x] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(x)
		v.AddArg(y)
		return true
	}
	// match: (SRLI [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW folds a constant shift amount into the
// 32-bit immediate form: (SRLW x (MOVDconst [val])) => (SRLIW [val&31] x).
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [val&31] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(val & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB simplifies 64-bit subtraction: turns
// subtraction of a negation into addition, folds x-x to 0, converts a
// constant operand into ADDI/NEG(ADDI) immediate forms when -val fits in
// 32 bits, and eliminates subtraction with a zero operand.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (NEG y))
	// result: (ADD x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ADD)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		// val - y == -(y - val)
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW simplifies 32-bit subtraction with a zero
// operand: x-0 becomes a sign-extending ADDIW [0] (SUBW sign-extends its
// 32-bit result, so a plain copy would not be equivalent), and 0-x becomes NEGW.
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR simplifies XOR: folds a 32-bit constant
// operand (tried in both positions, since XOR is commutative) into the
// immediate form XORI, and folds x^x to 0.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		// Commutative op: try both operand orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers a 16-bit rotate-left to
// OR(SLL(x, y&15), SRL(zext16(x), (-y)&15)), since RISC-V base ISA has no
// rotate instruction for sub-word widths.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x y)
	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(15)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(15)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRotateLeft8 lowers an 8-bit rotate-left to
// OR(SLL(x, y&7), SRL(zext8(x), (-y)&7)), since RISC-V base ISA has no
// rotate instruction for sub-word widths.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x y)
	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned 16-bit right shift by a
// 16-bit amount. When the amount may exceed the width, the SRL result is
// masked with -(y < 64) so out-of-range shifts yield 0 (Go semantics); when
// the shift is known bounded, a plain SRL of the zero-extended value suffices.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned 16-bit right shift by a
// 32-bit amount. Unbounded shifts mask the SRL result with -(y < 64) so
// out-of-range amounts yield 0; bounded shifts use a plain SRL of the
// zero-extended value.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned 16-bit right shift by a
// 64-bit amount. Unbounded shifts mask the SRL result with -(y < 64) so
// out-of-range amounts yield 0 (no zero-extension of y needed at 64 bits);
// bounded shifts use a plain SRL of the zero-extended value.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned 16-bit right shift by an
// 8-bit amount. Unbounded shifts mask the SRL result with -(y < 64) so
// out-of-range amounts yield 0; bounded shifts use a plain SRL of the
// zero-extended value.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers a signed 16-bit right shift by a
// 16-bit amount. Unbounded shifts clamp the amount: OR-ing y with
// ((y < 64) - 1) forces an out-of-range amount to all-ones, so SRA saturates
// to the sign bit (Go semantics). Bounded shifts use SRA of the sign-extended
// value directly.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers a signed 16-bit right shift by a
// 32-bit amount. Unbounded shifts clamp the amount to all-ones when y >= 64
// (via OR with (y<64)-1) so SRA saturates to the sign bit; bounded shifts
// use SRA of the sign-extended value directly.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers a signed 16-bit right shift by a
// 64-bit amount. Unbounded shifts clamp the amount to all-ones when y >= 64
// (via OR with (y<64)-1) so SRA saturates to the sign bit (no zero-extension
// of y needed at 64 bits); bounded shifts use SRA directly.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers a signed 16-bit right shift by an
// 8-bit amount. Unbounded shifts clamp the amount to all-ones when y >= 64
// (via OR with (y<64)-1) so SRA saturates to the sign bit; bounded shifts
// use SRA of the sign-extended value directly.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers the generic Rsh32Ux16 op (logical
// right shift of a 32-bit value by a 16-bit amount) to RISC-V SRLW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; Neg32
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers the generic Rsh32Ux32 op (logical
// right shift of a 32-bit value by a 32-bit amount) to RISC-V SRLW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; Neg32
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers the generic Rsh32Ux64 op (logical
// right shift of a 32-bit value by a 64-bit amount) to RISC-V SRLW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// SLTIU [32] y is 1 when y < 32; Neg32 turns that into an
		// all-ones/all-zeros mask, so the AND zeroes the result for
		// out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers the generic Rsh32Ux8 op (logical
// right shift of a 32-bit value by an 8-bit amount) to RISC-V SRLW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; Neg32
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers the generic Rsh32x16 op (arithmetic
// right shift of a 32-bit value by a 16-bit amount) to RISC-V SRAW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRAW fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers the generic Rsh32x32 op (arithmetic
// right shift of a 32-bit value by a 32-bit amount) to RISC-V SRAW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRAW fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers the generic Rsh32x64 op (arithmetic
// right shift of a 32-bit value by a 64-bit amount) to RISC-V SRAW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// SLTIU [32] y is 1 when y < 32; ADDI [-1] maps that to 0 or -1,
		// and ORing -1 into y saturates the amount so SRAW fills the result
		// with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers the generic Rsh32x8 op (arithmetic
// right shift of a 32-bit value by an 8-bit amount) to RISC-V SRAW.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// SLTIU [32] on the zero-extended amount is 1 when y < 32; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRAW fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers the generic Rsh64Ux16 op (logical
// right shift of a 64-bit value by a 16-bit amount) to RISC-V SRL.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg64
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers the generic Rsh64Ux32 op (logical
// right shift of a 64-bit value by a 32-bit amount) to RISC-V SRL.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg64
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers the generic Rsh64Ux64 op (logical
// right shift of a 64-bit value by a 64-bit amount) to RISC-V SRL.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// SLTIU [64] y is 1 when y < 64; Neg64 turns that into an
		// all-ones/all-zeros mask, so the AND zeroes the result for
		// out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers the generic Rsh64Ux8 op (logical
// right shift of a 64-bit value by an 8-bit amount) to RISC-V SRL.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg64
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers the generic Rsh64x16 op (arithmetic
// right shift of a 64-bit value by a 16-bit amount) to RISC-V SRA.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRA fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers the generic Rsh64x32 op (arithmetic
// right shift of a 64-bit value by a 32-bit amount) to RISC-V SRA.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRA fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers the generic Rsh64x64 op (arithmetic
// right shift of a 64-bit value by a 64-bit amount) to RISC-V SRA.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// SLTIU [64] y is 1 when y < 64; ADDI [-1] maps that to 0 or -1,
		// and ORing -1 into y saturates the amount so SRA fills the result
		// with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers the generic Rsh64x8 op (arithmetic
// right shift of a 64-bit value by an 8-bit amount) to RISC-V SRA.
// Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRA fills the result with sign bits for over-long shifts.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers the generic Rsh8Ux16 op (logical
// right shift of an 8-bit value by a 16-bit amount) to RISC-V SRL on the
// zero-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg8
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers the generic Rsh8Ux32 op (logical
// right shift of an 8-bit value by a 32-bit amount) to RISC-V SRL on the
// zero-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg8
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers the generic Rsh8Ux64 op (logical
// right shift of an 8-bit value by a 64-bit amount) to RISC-V SRL on the
// zero-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// SLTIU [64] y is 1 when y < 64; Neg8 turns that into an
		// all-ones/all-zeros mask, so the AND zeroes the result for
		// out-of-range shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers the generic Rsh8Ux8 op (logical
// right shift of an 8-bit value by an 8-bit amount) to RISC-V SRL on the
// zero-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; Neg8
		// turns that into an all-ones/all-zeros mask, so the AND zeroes the
		// result for out-of-range shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers the generic Rsh8x16 op (arithmetic
// right shift of an 8-bit value by a 16-bit amount) to RISC-V SRA on the
// sign-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRA fills the result with sign bits for over-long shifts.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers the generic Rsh8x32 op (arithmetic
// right shift of an 8-bit value by a 32-bit amount) to RISC-V SRA on the
// sign-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// SLTIU [64] on the zero-extended amount is 1 when y < 64; ADDI [-1]
		// maps that to 0 or -1, and ORing -1 into y saturates the amount so
		// SRA fills the result with sign bits for over-long shifts.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers the generic Rsh8x64 op (arithmetic
// right shift of an 8-bit value by a 64-bit amount) to RISC-V SRA on the
// sign-extended operand. Reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// SLTIU [64] y is 1 when y < 64; ADDI [-1] maps that to 0 or -1,
		// and ORing -1 into y saturates the amount so SRA fills the result
		// with sign bits for over-long shifts.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers the generic signed right shift of an
// 8-bit value by an 8-bit shift amount. Identical in shape to the Rsh8x64
// lowering, except the shift amount is zero-extended to 64 bits before the
// SLTIU range test, since the narrow amount's upper bits are undefined.
// Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result of a
// multi-result op): the sum of Add64carry, the difference of Sub64borrow,
// and — when the tuple has a single use — the high half of LoweredMuluhilo
// as a plain MULHU.
// Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only safe when this Select0 is the tuple's sole consumer;
		// otherwise Select1 still needs the paired low-half result.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect1 lowers Select1 (the second result of a
// multi-result op): the carry of Add64carry / borrow of Sub64borrow, computed
// with SLTU comparisons against the partial sums, and — when the tuple has a
// single use — the low half of LoweredMuluhilo as a plain MUL.
// Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Add64carry x y c))
	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		// s is the intermediate sum x+y; carry-out occurs iff s < x
		// (unsigned) or the final s+c wraps below s.
		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (Sub64borrow x y c))
	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		// s is the intermediate difference x-y; borrow occurs iff x < s
		// (unsigned) or s < s-c after subtracting the incoming borrow.
		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MUL x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only safe when this Select1 is the tuple's sole consumer;
		// otherwise Select0 still needs the paired high-half result.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask: produce all ones when
// x > 0 and all zeros when x == 0, via an arithmetic right shift of -x by 63
// (the sign bit of the negation is replicated across the word). Always
// matches, so the function unconditionally returns true.
// Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAI [63] (NEG <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpStore lowers the generic Store op to the
// width-specific RISCV64 store: MOVB/MOVH/MOVW/MOVD for 1/2/4/8-byte integer
// types and FMOVW/FMOVD for 4/8-byte floating-point types, keyed on the
// stored type's size and float-ness recorded in v.Aux.
// Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (FMOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpZero lowers the generic Zero op (clear [s] bytes at
// ptr). Small sizes become explicit store chains of a zero constant, with
// wider stores preferred when the type's alignment permits; rule order
// matters, as the aligned variants are tried before the byte-wise fallbacks.
// Larger sizes fall through to LoweredZero, or LoweredZeroLoop past
// 24*moveSize. Generated code: edit _gen/RISCV64.rules, not this function.
func rewriteValueRISCV64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s <= 24*moveSize(t.Alignment(), config)
	// result: (LoweredZero [makeValAndOff(int32(s),int32(t.Alignment()))] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s <= 24*moveSize(t.Alignment(), config)) {
			break
		}
		v.reset(OpRISCV64LoweredZero)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s > 24*moveSize(t.Alignment(), config)
	// result: (LoweredZeroLoop [makeValAndOff(int32(s),int32(t.Alignment()))] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s > 24*moveSize(t.Alignment(), config)) {
			break
		}
		v.reset(OpRISCV64LoweredZeroLoop)
		v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), int32(t.Alignment())))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
  9951  func rewriteBlockRISCV64(b *Block) bool {
  9952  	typ := &b.Func.Config.Types
  9953  	switch b.Kind {
  9954  	case BlockRISCV64BEQ:
  9955  		// match: (BEQ (MOVDconst [0]) cond yes no)
  9956  		// result: (BEQZ cond yes no)
  9957  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  9958  			v_0 := b.Controls[0]
  9959  			if auxIntToInt64(v_0.AuxInt) != 0 {
  9960  				break
  9961  			}
  9962  			cond := b.Controls[1]
  9963  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9964  			return true
  9965  		}
  9966  		// match: (BEQ cond (MOVDconst [0]) yes no)
  9967  		// result: (BEQZ cond yes no)
  9968  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  9969  			cond := b.Controls[0]
  9970  			v_1 := b.Controls[1]
  9971  			if auxIntToInt64(v_1.AuxInt) != 0 {
  9972  				break
  9973  			}
  9974  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  9975  			return true
  9976  		}
  9977  	case BlockRISCV64BEQZ:
  9978  		// match: (BEQZ (SEQZ x) yes no)
  9979  		// result: (BNEZ x yes no)
  9980  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9981  			v_0 := b.Controls[0]
  9982  			x := v_0.Args[0]
  9983  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9984  			return true
  9985  		}
  9986  		// match: (BEQZ (SNEZ x) yes no)
  9987  		// result: (BEQZ x yes no)
  9988  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9989  			v_0 := b.Controls[0]
  9990  			x := v_0.Args[0]
  9991  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9992  			return true
  9993  		}
  9994  		// match: (BEQZ (NEG x) yes no)
  9995  		// result: (BEQZ x yes no)
  9996  		for b.Controls[0].Op == OpRISCV64NEG {
  9997  			v_0 := b.Controls[0]
  9998  			x := v_0.Args[0]
  9999  			b.resetWithControl(BlockRISCV64BEQZ, x)
 10000  			return true
 10001  		}
 10002  		// match: (BEQZ (FNES <t> x y) yes no)
 10003  		// result: (BNEZ (FEQS <t> x y) yes no)
 10004  		for b.Controls[0].Op == OpRISCV64FNES {
 10005  			v_0 := b.Controls[0]
 10006  			t := v_0.Type
 10007  			_ = v_0.Args[1]
 10008  			v_0_0 := v_0.Args[0]
 10009  			v_0_1 := v_0.Args[1]
 10010  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10011  				x := v_0_0
 10012  				y := v_0_1
 10013  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
 10014  				v0.AddArg2(x, y)
 10015  				b.resetWithControl(BlockRISCV64BNEZ, v0)
 10016  				return true
 10017  			}
 10018  		}
 10019  		// match: (BEQZ (FNED <t> x y) yes no)
 10020  		// result: (BNEZ (FEQD <t> x y) yes no)
 10021  		for b.Controls[0].Op == OpRISCV64FNED {
 10022  			v_0 := b.Controls[0]
 10023  			t := v_0.Type
 10024  			_ = v_0.Args[1]
 10025  			v_0_0 := v_0.Args[0]
 10026  			v_0_1 := v_0.Args[1]
 10027  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10028  				x := v_0_0
 10029  				y := v_0_1
 10030  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
 10031  				v0.AddArg2(x, y)
 10032  				b.resetWithControl(BlockRISCV64BNEZ, v0)
 10033  				return true
 10034  			}
 10035  		}
 10036  		// match: (BEQZ (SUB x y) yes no)
 10037  		// result: (BEQ x y yes no)
 10038  		for b.Controls[0].Op == OpRISCV64SUB {
 10039  			v_0 := b.Controls[0]
 10040  			y := v_0.Args[1]
 10041  			x := v_0.Args[0]
 10042  			b.resetWithControl2(BlockRISCV64BEQ, x, y)
 10043  			return true
 10044  		}
 10045  		// match: (BEQZ (SLT x y) yes no)
 10046  		// result: (BGE x y yes no)
 10047  		for b.Controls[0].Op == OpRISCV64SLT {
 10048  			v_0 := b.Controls[0]
 10049  			y := v_0.Args[1]
 10050  			x := v_0.Args[0]
 10051  			b.resetWithControl2(BlockRISCV64BGE, x, y)
 10052  			return true
 10053  		}
 10054  		// match: (BEQZ (SLTU x y) yes no)
 10055  		// result: (BGEU x y yes no)
 10056  		for b.Controls[0].Op == OpRISCV64SLTU {
 10057  			v_0 := b.Controls[0]
 10058  			y := v_0.Args[1]
 10059  			x := v_0.Args[0]
 10060  			b.resetWithControl2(BlockRISCV64BGEU, x, y)
 10061  			return true
 10062  		}
 10063  		// match: (BEQZ (SLTI [x] y) yes no)
 10064  		// result: (BGE y (MOVDconst [x]) yes no)
 10065  		for b.Controls[0].Op == OpRISCV64SLTI {
 10066  			v_0 := b.Controls[0]
 10067  			x := auxIntToInt64(v_0.AuxInt)
 10068  			y := v_0.Args[0]
 10069  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10070  			v0.AuxInt = int64ToAuxInt(x)
 10071  			b.resetWithControl2(BlockRISCV64BGE, y, v0)
 10072  			return true
 10073  		}
 10074  		// match: (BEQZ (SLTIU [x] y) yes no)
 10075  		// result: (BGEU y (MOVDconst [x]) yes no)
 10076  		for b.Controls[0].Op == OpRISCV64SLTIU {
 10077  			v_0 := b.Controls[0]
 10078  			x := auxIntToInt64(v_0.AuxInt)
 10079  			y := v_0.Args[0]
 10080  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10081  			v0.AuxInt = int64ToAuxInt(x)
 10082  			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
 10083  			return true
 10084  		}
 10085  		// match: (BEQZ (ANDI [c] (FCLASSD (FNEGD x))) yes no)
 10086  		// result: (BEQZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)) yes no)
 10087  		for b.Controls[0].Op == OpRISCV64ANDI {
 10088  			v_0 := b.Controls[0]
 10089  			c := auxIntToInt64(v_0.AuxInt)
 10090  			v_0_0 := v_0.Args[0]
 10091  			if v_0_0.Op != OpRISCV64FCLASSD {
 10092  				break
 10093  			}
 10094  			v_0_0_0 := v_0_0.Args[0]
 10095  			if v_0_0_0.Op != OpRISCV64FNEGD {
 10096  				break
 10097  			}
 10098  			x := v_0_0_0.Args[0]
 10099  			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
 10100  			v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
 10101  			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
 10102  			v1.AddArg(x)
 10103  			v0.AddArg(v1)
 10104  			b.resetWithControl(BlockRISCV64BEQZ, v0)
 10105  			return true
 10106  		}
 10107  		// match: (BEQZ (ANDI [c] (FCLASSD (FABSD x))) yes no)
 10108  		// result: (BEQZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)) yes no)
 10109  		for b.Controls[0].Op == OpRISCV64ANDI {
 10110  			v_0 := b.Controls[0]
 10111  			c := auxIntToInt64(v_0.AuxInt)
 10112  			v_0_0 := v_0.Args[0]
 10113  			if v_0_0.Op != OpRISCV64FCLASSD {
 10114  				break
 10115  			}
 10116  			v_0_0_0 := v_0_0.Args[0]
 10117  			if v_0_0_0.Op != OpRISCV64FABSD {
 10118  				break
 10119  			}
 10120  			x := v_0_0_0.Args[0]
 10121  			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
 10122  			v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
 10123  			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
 10124  			v1.AddArg(x)
 10125  			v0.AddArg(v1)
 10126  			b.resetWithControl(BlockRISCV64BEQZ, v0)
 10127  			return true
 10128  		}
 10129  	case BlockRISCV64BGE:
 10130  		// match: (BGE (MOVDconst [0]) cond yes no)
 10131  		// result: (BLEZ cond yes no)
 10132  		for b.Controls[0].Op == OpRISCV64MOVDconst {
 10133  			v_0 := b.Controls[0]
 10134  			if auxIntToInt64(v_0.AuxInt) != 0 {
 10135  				break
 10136  			}
 10137  			cond := b.Controls[1]
 10138  			b.resetWithControl(BlockRISCV64BLEZ, cond)
 10139  			return true
 10140  		}
 10141  		// match: (BGE cond (MOVDconst [0]) yes no)
 10142  		// result: (BGEZ cond yes no)
 10143  		for b.Controls[1].Op == OpRISCV64MOVDconst {
 10144  			cond := b.Controls[0]
 10145  			v_1 := b.Controls[1]
 10146  			if auxIntToInt64(v_1.AuxInt) != 0 {
 10147  				break
 10148  			}
 10149  			b.resetWithControl(BlockRISCV64BGEZ, cond)
 10150  			return true
 10151  		}
 10152  	case BlockRISCV64BGEU:
 10153  		// match: (BGEU (MOVDconst [0]) cond yes no)
 10154  		// result: (BEQZ cond yes no)
 10155  		for b.Controls[0].Op == OpRISCV64MOVDconst {
 10156  			v_0 := b.Controls[0]
 10157  			if auxIntToInt64(v_0.AuxInt) != 0 {
 10158  				break
 10159  			}
 10160  			cond := b.Controls[1]
 10161  			b.resetWithControl(BlockRISCV64BEQZ, cond)
 10162  			return true
 10163  		}
 10164  	case BlockRISCV64BLT:
 10165  		// match: (BLT (MOVDconst [0]) cond yes no)
 10166  		// result: (BGTZ cond yes no)
 10167  		for b.Controls[0].Op == OpRISCV64MOVDconst {
 10168  			v_0 := b.Controls[0]
 10169  			if auxIntToInt64(v_0.AuxInt) != 0 {
 10170  				break
 10171  			}
 10172  			cond := b.Controls[1]
 10173  			b.resetWithControl(BlockRISCV64BGTZ, cond)
 10174  			return true
 10175  		}
 10176  		// match: (BLT cond (MOVDconst [0]) yes no)
 10177  		// result: (BLTZ cond yes no)
 10178  		for b.Controls[1].Op == OpRISCV64MOVDconst {
 10179  			cond := b.Controls[0]
 10180  			v_1 := b.Controls[1]
 10181  			if auxIntToInt64(v_1.AuxInt) != 0 {
 10182  				break
 10183  			}
 10184  			b.resetWithControl(BlockRISCV64BLTZ, cond)
 10185  			return true
 10186  		}
 10187  	case BlockRISCV64BLTU:
 10188  		// match: (BLTU (MOVDconst [0]) cond yes no)
 10189  		// result: (BNEZ cond yes no)
 10190  		for b.Controls[0].Op == OpRISCV64MOVDconst {
 10191  			v_0 := b.Controls[0]
 10192  			if auxIntToInt64(v_0.AuxInt) != 0 {
 10193  				break
 10194  			}
 10195  			cond := b.Controls[1]
 10196  			b.resetWithControl(BlockRISCV64BNEZ, cond)
 10197  			return true
 10198  		}
 10199  	case BlockRISCV64BNE:
 10200  		// match: (BNE (MOVDconst [0]) cond yes no)
 10201  		// result: (BNEZ cond yes no)
 10202  		for b.Controls[0].Op == OpRISCV64MOVDconst {
 10203  			v_0 := b.Controls[0]
 10204  			if auxIntToInt64(v_0.AuxInt) != 0 {
 10205  				break
 10206  			}
 10207  			cond := b.Controls[1]
 10208  			b.resetWithControl(BlockRISCV64BNEZ, cond)
 10209  			return true
 10210  		}
 10211  		// match: (BNE cond (MOVDconst [0]) yes no)
 10212  		// result: (BNEZ cond yes no)
 10213  		for b.Controls[1].Op == OpRISCV64MOVDconst {
 10214  			cond := b.Controls[0]
 10215  			v_1 := b.Controls[1]
 10216  			if auxIntToInt64(v_1.AuxInt) != 0 {
 10217  				break
 10218  			}
 10219  			b.resetWithControl(BlockRISCV64BNEZ, cond)
 10220  			return true
 10221  		}
 10222  	case BlockRISCV64BNEZ:
 10223  		// match: (BNEZ (SEQZ x) yes no)
 10224  		// result: (BEQZ x yes no)
 10225  		for b.Controls[0].Op == OpRISCV64SEQZ {
 10226  			v_0 := b.Controls[0]
 10227  			x := v_0.Args[0]
 10228  			b.resetWithControl(BlockRISCV64BEQZ, x)
 10229  			return true
 10230  		}
 10231  		// match: (BNEZ (SNEZ x) yes no)
 10232  		// result: (BNEZ x yes no)
 10233  		for b.Controls[0].Op == OpRISCV64SNEZ {
 10234  			v_0 := b.Controls[0]
 10235  			x := v_0.Args[0]
 10236  			b.resetWithControl(BlockRISCV64BNEZ, x)
 10237  			return true
 10238  		}
 10239  		// match: (BNEZ (NEG x) yes no)
 10240  		// result: (BNEZ x yes no)
 10241  		for b.Controls[0].Op == OpRISCV64NEG {
 10242  			v_0 := b.Controls[0]
 10243  			x := v_0.Args[0]
 10244  			b.resetWithControl(BlockRISCV64BNEZ, x)
 10245  			return true
 10246  		}
 10247  		// match: (BNEZ (FNES <t> x y) yes no)
 10248  		// result: (BEQZ (FEQS <t> x y) yes no)
 10249  		for b.Controls[0].Op == OpRISCV64FNES {
 10250  			v_0 := b.Controls[0]
 10251  			t := v_0.Type
 10252  			_ = v_0.Args[1]
 10253  			v_0_0 := v_0.Args[0]
 10254  			v_0_1 := v_0.Args[1]
 10255  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10256  				x := v_0_0
 10257  				y := v_0_1
 10258  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
 10259  				v0.AddArg2(x, y)
 10260  				b.resetWithControl(BlockRISCV64BEQZ, v0)
 10261  				return true
 10262  			}
 10263  		}
 10264  		// match: (BNEZ (FNED <t> x y) yes no)
 10265  		// result: (BEQZ (FEQD <t> x y) yes no)
 10266  		for b.Controls[0].Op == OpRISCV64FNED {
 10267  			v_0 := b.Controls[0]
 10268  			t := v_0.Type
 10269  			_ = v_0.Args[1]
 10270  			v_0_0 := v_0.Args[0]
 10271  			v_0_1 := v_0.Args[1]
 10272  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10273  				x := v_0_0
 10274  				y := v_0_1
 10275  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
 10276  				v0.AddArg2(x, y)
 10277  				b.resetWithControl(BlockRISCV64BEQZ, v0)
 10278  				return true
 10279  			}
 10280  		}
 10281  		// match: (BNEZ (SUB x y) yes no)
 10282  		// result: (BNE x y yes no)
 10283  		for b.Controls[0].Op == OpRISCV64SUB {
 10284  			v_0 := b.Controls[0]
 10285  			y := v_0.Args[1]
 10286  			x := v_0.Args[0]
 10287  			b.resetWithControl2(BlockRISCV64BNE, x, y)
 10288  			return true
 10289  		}
 10290  		// match: (BNEZ (SLT x y) yes no)
 10291  		// result: (BLT x y yes no)
 10292  		for b.Controls[0].Op == OpRISCV64SLT {
 10293  			v_0 := b.Controls[0]
 10294  			y := v_0.Args[1]
 10295  			x := v_0.Args[0]
 10296  			b.resetWithControl2(BlockRISCV64BLT, x, y)
 10297  			return true
 10298  		}
 10299  		// match: (BNEZ (SLTU x y) yes no)
 10300  		// result: (BLTU x y yes no)
 10301  		for b.Controls[0].Op == OpRISCV64SLTU {
 10302  			v_0 := b.Controls[0]
 10303  			y := v_0.Args[1]
 10304  			x := v_0.Args[0]
 10305  			b.resetWithControl2(BlockRISCV64BLTU, x, y)
 10306  			return true
 10307  		}
 10308  		// match: (BNEZ (SLTI [x] y) yes no)
 10309  		// result: (BLT y (MOVDconst [x]) yes no)
 10310  		for b.Controls[0].Op == OpRISCV64SLTI {
 10311  			v_0 := b.Controls[0]
 10312  			x := auxIntToInt64(v_0.AuxInt)
 10313  			y := v_0.Args[0]
 10314  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10315  			v0.AuxInt = int64ToAuxInt(x)
 10316  			b.resetWithControl2(BlockRISCV64BLT, y, v0)
 10317  			return true
 10318  		}
 10319  		// match: (BNEZ (SLTIU [x] y) yes no)
 10320  		// result: (BLTU y (MOVDconst [x]) yes no)
 10321  		for b.Controls[0].Op == OpRISCV64SLTIU {
 10322  			v_0 := b.Controls[0]
 10323  			x := auxIntToInt64(v_0.AuxInt)
 10324  			y := v_0.Args[0]
 10325  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
 10326  			v0.AuxInt = int64ToAuxInt(x)
 10327  			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
 10328  			return true
 10329  		}
 10330  		// match: (BNEZ (ANDI [c] (FCLASSD (FNEGD x))) yes no)
 10331  		// result: (BNEZ (ANDI <typ.Int64> [(c&0b11_0000_0000)|int64(bits.Reverse8(uint8(c))&0b1111_1111)] (FCLASSD x)) yes no)
 10332  		for b.Controls[0].Op == OpRISCV64ANDI {
 10333  			v_0 := b.Controls[0]
 10334  			c := auxIntToInt64(v_0.AuxInt)
 10335  			v_0_0 := v_0.Args[0]
 10336  			if v_0_0.Op != OpRISCV64FCLASSD {
 10337  				break
 10338  			}
 10339  			v_0_0_0 := v_0_0.Args[0]
 10340  			if v_0_0_0.Op != OpRISCV64FNEGD {
 10341  				break
 10342  			}
 10343  			x := v_0_0_0.Args[0]
 10344  			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
 10345  			v0.AuxInt = int64ToAuxInt((c & 0b11_0000_0000) | int64(bits.Reverse8(uint8(c))&0b1111_1111))
 10346  			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
 10347  			v1.AddArg(x)
 10348  			v0.AddArg(v1)
 10349  			b.resetWithControl(BlockRISCV64BNEZ, v0)
 10350  			return true
 10351  		}
 10352  		// match: (BNEZ (ANDI [c] (FCLASSD (FABSD x))) yes no)
 10353  		// result: (BNEZ (ANDI <typ.Int64> [(c&0b11_1111_0000)|int64(bits.Reverse8(uint8(c))&0b0000_1111)] (FCLASSD x)) yes no)
 10354  		for b.Controls[0].Op == OpRISCV64ANDI {
 10355  			v_0 := b.Controls[0]
 10356  			c := auxIntToInt64(v_0.AuxInt)
 10357  			v_0_0 := v_0.Args[0]
 10358  			if v_0_0.Op != OpRISCV64FCLASSD {
 10359  				break
 10360  			}
 10361  			v_0_0_0 := v_0_0.Args[0]
 10362  			if v_0_0_0.Op != OpRISCV64FABSD {
 10363  				break
 10364  			}
 10365  			x := v_0_0_0.Args[0]
 10366  			v0 := b.NewValue0(v_0.Pos, OpRISCV64ANDI, typ.Int64)
 10367  			v0.AuxInt = int64ToAuxInt((c & 0b11_1111_0000) | int64(bits.Reverse8(uint8(c))&0b0000_1111))
 10368  			v1 := b.NewValue0(v_0.Pos, OpRISCV64FCLASSD, typ.Int64)
 10369  			v1.AddArg(x)
 10370  			v0.AddArg(v1)
 10371  			b.resetWithControl(BlockRISCV64BNEZ, v0)
 10372  			return true
 10373  		}
 10374  	case BlockIf:
 10375  		// match: (If cond yes no)
 10376  		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
 10377  		for {
 10378  			cond := b.Controls[0]
 10379  			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
 10380  			v0.AddArg(cond)
 10381  			b.resetWithControl(BlockRISCV64BNEZ, v0)
 10382  			return true
 10383  		}
 10384  	}
 10385  	return false
 10386  }
 10387  

View as plain text