Source file src/cmd/compile/internal/ssa/rewriteARM64latelower.go

// Code generated from _gen/ARM64latelower.rules using 'go generate'; DO NOT EDIT.

package ssa

func rewriteValueARM64latelower(v *Value) bool {
	switch v.Op {
	case OpARM64ADDSconstflags:
		return rewriteValueARM64latelower_OpARM64ADDSconstflags(v)
	case OpARM64ADDconst:
		return rewriteValueARM64latelower_OpARM64ADDconst(v)
	case OpARM64ANDconst:
		return rewriteValueARM64latelower_OpARM64ANDconst(v)
	case OpARM64CMNWconst:
		return rewriteValueARM64latelower_OpARM64CMNWconst(v)
	case OpARM64CMNconst:
		return rewriteValueARM64latelower_OpARM64CMNconst(v)
	case OpARM64CMPWconst:
		return rewriteValueARM64latelower_OpARM64CMPWconst(v)
	case OpARM64CMPconst:
		return rewriteValueARM64latelower_OpARM64CMPconst(v)
	case OpARM64MOVBUreg:
		return rewriteValueARM64latelower_OpARM64MOVBUreg(v)
	case OpARM64MOVBreg:
		return rewriteValueARM64latelower_OpARM64MOVBreg(v)
	case OpARM64MOVDconst:
		return rewriteValueARM64latelower_OpARM64MOVDconst(v)
	case OpARM64MOVDnop:
		return rewriteValueARM64latelower_OpARM64MOVDnop(v)
	case OpARM64MOVDreg:
		return rewriteValueARM64latelower_OpARM64MOVDreg(v)
	case OpARM64MOVHUreg:
		return rewriteValueARM64latelower_OpARM64MOVHUreg(v)
	case OpARM64MOVHreg:
		return rewriteValueARM64latelower_OpARM64MOVHreg(v)
	case OpARM64MOVWUreg:
		return rewriteValueARM64latelower_OpARM64MOVWUreg(v)
	case OpARM64MOVWreg:
		return rewriteValueARM64latelower_OpARM64MOVWreg(v)
	case OpARM64ORconst:
		return rewriteValueARM64latelower_OpARM64ORconst(v)
	case OpARM64SUBconst:
		return rewriteValueARM64latelower_OpARM64SUBconst(v)
	case OpARM64TSTWconst:
		return rewriteValueARM64latelower_OpARM64TSTWconst(v)
	case OpARM64TSTconst:
		return rewriteValueARM64latelower_OpARM64TSTconst(v)
	case OpARM64XORconst:
		return rewriteValueARM64latelower_OpARM64XORconst(v)
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDSconstflags(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDSconstflags [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADDSflags x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADDSflags)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (ADD x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ANDconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (AND x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64AND)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMNW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMNW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMNconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMN x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMN)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconst [c] x)
	// cond: !isARM64addcon(int64(c))
	// result: (CMPW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(int64(c))) {
			break
		}
		v.reset(OpARM64CMPW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (CMP x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64CMP)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg x:(Equal _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64Equal {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(NotEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64NotEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(LessEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64LessEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThan _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThan {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterThanF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterThanF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqual _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqual {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualU _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(GreaterEqualF _))
	// result: x
	for {
		x := v_0
		if x.Op != OpARM64GreaterEqualF {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDconst(v *Value) bool {
	// match: (MOVDconst [0])
	// result: (ZERO)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64ZERO)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDnop(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDnop (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(c)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVDreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v_0
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg x)
	// cond: zeroUpper32Bits(x, 3)
	// result: x
	for {
		x := v_0
		if !(zeroUpper32Bits(x, 3)) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (OR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64OR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64SUBconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SUBconst [c] x)
	// cond: !isARM64addcon(c)
	// result: (SUB x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64addcon(c)) {
			break
		}
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTWconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTWconst [c] x)
	// cond: !isARM64bitcon(uint64(c)|uint64(c)<<32)
	// result: (TSTW x (MOVDconst [int64(c)]))
	for {
		c := auxIntToInt32(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c) | uint64(c)<<32)) {
			break
		}
		v.reset(OpARM64TSTW)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(c))
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64TSTconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (TSTconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (TST x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64TST)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteValueARM64latelower_OpARM64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XORconst [c] x)
	// cond: !isARM64bitcon(uint64(c))
	// result: (XOR x (MOVDconst [c]))
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(!isARM64bitcon(uint64(c))) {
			break
		}
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(c)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
func rewriteBlockARM64latelower(b *Block) bool {
	return false
}
