Source file src/cmd/compile/internal/loong64/ssa.go

     1  // Copyright 2022 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package loong64
     6  
     7  import (
     8  	"math"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/logopt"
    13  	"cmd/compile/internal/objw"
    14  	"cmd/compile/internal/ssa"
    15  	"cmd/compile/internal/ssagen"
    16  	"cmd/compile/internal/types"
    17  	"cmd/internal/obj"
    18  	"cmd/internal/obj/loong64"
    19  )
    20  
    21  // isFPreg reports whether r is an FP register.
    22  func isFPreg(r int16) bool {
    23  	return loong64.REG_F0 <= r && r <= loong64.REG_F31
    24  }
    25  
    26  // loadByType returns the load instruction of the given type.
    27  func loadByType(t *types.Type, r int16) obj.As {
    28  	if isFPreg(r) {
    29  		if t.Size() == 4 {
    30  			return loong64.AMOVF
    31  		} else {
    32  			return loong64.AMOVD
    33  		}
    34  	} else {
    35  		switch t.Size() {
    36  		case 1:
    37  			if t.IsSigned() {
    38  				return loong64.AMOVB
    39  			} else {
    40  				return loong64.AMOVBU
    41  			}
    42  		case 2:
    43  			if t.IsSigned() {
    44  				return loong64.AMOVH
    45  			} else {
    46  				return loong64.AMOVHU
    47  			}
    48  		case 4:
    49  			if t.IsSigned() {
    50  				return loong64.AMOVW
    51  			} else {
    52  				return loong64.AMOVWU
    53  			}
    54  		case 8:
    55  			return loong64.AMOVV
    56  		}
    57  	}
    58  	panic("bad load type")
    59  }
    60  
    61  // storeByType returns the store instruction of the given type.
    62  func storeByType(t *types.Type, r int16) obj.As {
    63  	if isFPreg(r) {
    64  		if t.Size() == 4 {
    65  			return loong64.AMOVF
    66  		} else {
    67  			return loong64.AMOVD
    68  		}
    69  	} else {
    70  		switch t.Size() {
    71  		case 1:
    72  			return loong64.AMOVB
    73  		case 2:
    74  			return loong64.AMOVH
    75  		case 4:
    76  			return loong64.AMOVW
    77  		case 8:
    78  			return loong64.AMOVV
    79  		}
    80  	}
    81  	panic("bad store type")
    82  }
    83  
    84  // largestMove returns the largest move instruction possible and its size,
    85  // given the alignment of the total size of the move.
    86  //
    87  // e.g., a 16-byte move may use MOVV, but an 11-byte move must use MOVB.
    88  //
    89  // Note that the moves may not be on naturally aligned addresses depending on
    90  // the source and destination.
    91  //
    92  // This matches the calculation in ssa.moveSize.
    93  func largestMove(alignment int64) (obj.As, int64) {
    94  	switch {
    95  	case alignment%8 == 0:
    96  		return loong64.AMOVV, 8
    97  	case alignment%4 == 0:
    98  		return loong64.AMOVW, 4
    99  	case alignment%2 == 0:
   100  		return loong64.AMOVH, 2
   101  	default:
   102  		return loong64.AMOVB, 1
   103  	}
   104  }
   105  
// ssaGenValue emits the machine instruction(s) for a single SSA value v
// into the current function via s.Prog.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpLOONG64MOVVreg:
		// Register-to-register move. Elide it entirely when source and
		// destination coincide; use MOVD when both sides are FP registers.
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := loong64.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = loong64.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpLOONG64MOVVnop,
		ssa.OpLOONG64LoweredRound32F,
		ssa.OpLOONG64LoweredRound64F:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
		// The loop only runs once.
		for _, a := range v.Block.Func.RegArgs {
			// Pass the spill/unspill information along to the assembler, offset by size of
			// the saved LR slot.
			addr := ssagen.SpillSlotAddr(a, loong64.REGSP, base.Ctxt.Arch.FixedFrameSize)
			s.FuncInfo().AddSpill(
				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type, a.Reg), Spill: storeByType(a.Type, a.Reg)})
		}
		v.Block.Func.RegArgs = nil
		ssagen.CheckArgReg(v)
	case ssa.OpLOONG64ADDV,
		ssa.OpLOONG64SUBV,
		ssa.OpLOONG64AND,
		ssa.OpLOONG64OR,
		ssa.OpLOONG64XOR,
		ssa.OpLOONG64NOR,
		ssa.OpLOONG64SLL,
		ssa.OpLOONG64SLLV,
		ssa.OpLOONG64SRL,
		ssa.OpLOONG64SRLV,
		ssa.OpLOONG64SRA,
		ssa.OpLOONG64SRAV,
		ssa.OpLOONG64ROTR,
		ssa.OpLOONG64ROTRV,
		ssa.OpLOONG64ADDF,
		ssa.OpLOONG64ADDD,
		ssa.OpLOONG64SUBF,
		ssa.OpLOONG64SUBD,
		ssa.OpLOONG64MULF,
		ssa.OpLOONG64MULD,
		ssa.OpLOONG64DIVF,
		ssa.OpLOONG64DIVD,
		ssa.OpLOONG64MULV, ssa.OpLOONG64MULHV, ssa.OpLOONG64MULHVU,
		ssa.OpLOONG64DIVV, ssa.OpLOONG64REMV, ssa.OpLOONG64DIVVU, ssa.OpLOONG64REMVU,
		ssa.OpLOONG64FCOPYSGD:
		// Generic three-register op: INSN Rarg1, Rarg0, Rout.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64BSTRPICKV,
		ssa.OpLOONG64BSTRPICKW:
		// AuxInt packs both bit-field operands: the high bits hold msb and
		// the low 5 (W) or 6 (V) bits hold lsb.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		if v.Op == ssa.OpLOONG64BSTRPICKW {
			p.From.Offset = v.AuxInt >> 5
			p.AddRestSourceConst(v.AuxInt & 0x1f)
		} else {
			p.From.Offset = v.AuxInt >> 6
			p.AddRestSourceConst(v.AuxInt & 0x3f)
		}
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64FMINF,
		ssa.OpLOONG64FMIND,
		ssa.OpLOONG64FMAXF,
		ssa.OpLOONG64FMAXD:
		// ADDD Rarg0, Rarg1, Rout
		// CMPEQD Rarg0, Rarg0, FCC0
		// bceqz FCC0, end
		// CMPEQD Rarg1, Rarg1, FCC0
		// bceqz FCC0, end
		// F(MIN|MAX)(F|D)
		//
		// The leading ADD computes a result that is kept only when either
		// input fails its self-equality check (i.e. is NaN), so NaNs
		// propagate; otherwise the FMIN/FMAX result overwrites it.

		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg()
		add, fcmp := loong64.AADDD, loong64.ACMPEQD
		if v.Op == ssa.OpLOONG64FMINF || v.Op == ssa.OpLOONG64FMAXF {
			add = loong64.AADDF
			fcmp = loong64.ACMPEQF
		}
		p1 := s.Prog(add)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r0
		p1.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out

		p2 := s.Prog(fcmp)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r0
		p2.Reg = r0
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = loong64.REG_FCC0

		p3 := s.Prog(loong64.ABFPF)
		p3.To.Type = obj.TYPE_BRANCH

		p4 := s.Prog(fcmp)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = r1
		p4.Reg = r1
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = loong64.REG_FCC0

		p5 := s.Prog(loong64.ABFPF)
		p5.To.Type = obj.TYPE_BRANCH

		p6 := s.Prog(v.Op.Asm())
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = r1
		p6.Reg = r0
		p6.To.Type = obj.TYPE_REG
		p6.To.Reg = out

		nop := s.Prog(obj.ANOP)
		p3.To.SetTarget(nop)
		p5.To.SetTarget(nop)

	case ssa.OpLOONG64SGT,
		ssa.OpLOONG64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64ADDVconst,
		ssa.OpLOONG64SUBVconst,
		ssa.OpLOONG64ANDconst,
		ssa.OpLOONG64ORconst,
		ssa.OpLOONG64XORconst,
		ssa.OpLOONG64NORconst,
		ssa.OpLOONG64SLLconst,
		ssa.OpLOONG64SLLVconst,
		ssa.OpLOONG64SRLconst,
		ssa.OpLOONG64SRLVconst,
		ssa.OpLOONG64SRAconst,
		ssa.OpLOONG64SRAVconst,
		ssa.OpLOONG64ROTRconst,
		ssa.OpLOONG64ROTRVconst,
		ssa.OpLOONG64SGTconst,
		ssa.OpLOONG64SGTUconst:
		// Register-with-immediate op: INSN $aux, Rarg0, Rout.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = loong64.REGTMP
			p = s.Prog(loong64.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = loong64.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpLOONG64MOVFconst,
		ssa.OpLOONG64MOVDconst:
		// AuxInt holds the float constant as its IEEE-754 bit pattern.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64CMPEQF,
		ssa.OpLOONG64CMPEQD,
		ssa.OpLOONG64CMPGEF,
		ssa.OpLOONG64CMPGED,
		ssa.OpLOONG64CMPGTF,
		ssa.OpLOONG64CMPGTD:
		// FP comparisons set the FCC0 condition flag register.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REG_FCC0

	case ssa.OpLOONG64FMADDF,
		ssa.OpLOONG64FMADDD,
		ssa.OpLOONG64FMSUBF,
		ssa.OpLOONG64FMSUBD,
		ssa.OpLOONG64FNMADDF,
		ssa.OpLOONG64FNMADDD,
		ssa.OpLOONG64FNMSUBF,
		ssa.OpLOONG64FNMSUBD:
		p := s.Prog(v.Op.Asm())
		// r=(FMA x y z) -> FMADDD z, y, x, r
		// the SSA operand order is for taking advantage of
		// commutativity (that only applies for the first two operands)
		r := v.Reg()
		x := v.Args[0].Reg()
		y := v.Args[1].Reg()
		z := v.Args[2].Reg()
		p.From.Type = obj.TYPE_REG
		p.From.Reg = z
		p.Reg = y
		p.AddRestSourceReg(x)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r

	case ssa.OpLOONG64MOVVaddr:
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R3)
		// when constant is large, tmp register (R30) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64MOVBloadidx,
		ssa.OpLOONG64MOVBUloadidx,
		ssa.OpLOONG64MOVHloadidx,
		ssa.OpLOONG64MOVHUloadidx,
		ssa.OpLOONG64MOVWloadidx,
		ssa.OpLOONG64MOVWUloadidx,
		ssa.OpLOONG64MOVVloadidx,
		ssa.OpLOONG64MOVFloadidx,
		ssa.OpLOONG64MOVDloadidx:
		// Indexed load: base register in Args[0], index register in Args[1].
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Name = obj.NAME_NONE
		p.From.Reg = v.Args[0].Reg()
		p.From.Index = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64MOVBstoreidx,
		ssa.OpLOONG64MOVHstoreidx,
		ssa.OpLOONG64MOVWstoreidx,
		ssa.OpLOONG64MOVVstoreidx,
		ssa.OpLOONG64MOVFstoreidx,
		ssa.OpLOONG64MOVDstoreidx:
		// Indexed store: value in Args[2], base in Args[0], index in Args[1].
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_NONE
		p.To.Reg = v.Args[0].Reg()
		p.To.Index = v.Args[1].Reg()

	case ssa.OpLOONG64MOVBstorezeroidx,
		ssa.OpLOONG64MOVHstorezeroidx,
		ssa.OpLOONG64MOVWstorezeroidx,
		ssa.OpLOONG64MOVVstorezeroidx:
		// Indexed store of zero: store the hardwired zero register.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_NONE
		p.To.Reg = v.Args[0].Reg()
		p.To.Index = v.Args[1].Reg()

	case ssa.OpLOONG64MOVBload,
		ssa.OpLOONG64MOVBUload,
		ssa.OpLOONG64MOVHload,
		ssa.OpLOONG64MOVHUload,
		ssa.OpLOONG64MOVWload,
		ssa.OpLOONG64MOVWUload,
		ssa.OpLOONG64MOVVload,
		ssa.OpLOONG64MOVFload,
		ssa.OpLOONG64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MOVBstore,
		ssa.OpLOONG64MOVHstore,
		ssa.OpLOONG64MOVWstore,
		ssa.OpLOONG64MOVVstore,
		ssa.OpLOONG64MOVFstore,
		ssa.OpLOONG64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpLOONG64MOVBstorezero,
		ssa.OpLOONG64MOVHstorezero,
		ssa.OpLOONG64MOVWstorezero,
		ssa.OpLOONG64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpLOONG64MOVBreg,
		ssa.OpLOONG64MOVBUreg,
		ssa.OpLOONG64MOVHreg,
		ssa.OpLOONG64MOVHUreg,
		ssa.OpLOONG64MOVWreg,
		ssa.OpLOONG64MOVWUreg:
		// Sign/zero extension. Try to elide it when the argument is already
		// a properly extended load into an integer register.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpLOONG64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && loong64.REG_R0 <= a.Reg() && a.Reg() <= loong64.REG_R31 {
			// LoadReg from a narrower type does an extension, except loading
			// to a floating point register. So only eliminate the extension
			// if it is loaded to an integer register.

			t := a.Type
			switch {
			case v.Op == ssa.OpLOONG64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpLOONG64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(loong64.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough

	case ssa.OpLOONG64MOVWF,
		ssa.OpLOONG64MOVWD,
		ssa.OpLOONG64TRUNCFW,
		ssa.OpLOONG64TRUNCDW,
		ssa.OpLOONG64MOVVF,
		ssa.OpLOONG64MOVVD,
		ssa.OpLOONG64TRUNCFV,
		ssa.OpLOONG64TRUNCDV,
		ssa.OpLOONG64MOVFD,
		ssa.OpLOONG64MOVDF,
		ssa.OpLOONG64MOVWfpgp,
		ssa.OpLOONG64MOVWgpfp,
		ssa.OpLOONG64MOVVfpgp,
		ssa.OpLOONG64MOVVgpfp,
		ssa.OpLOONG64NEGF,
		ssa.OpLOONG64NEGD,
		ssa.OpLOONG64CLZW,
		ssa.OpLOONG64CLZV,
		ssa.OpLOONG64CTZW,
		ssa.OpLOONG64CTZV,
		ssa.OpLOONG64SQRTD,
		ssa.OpLOONG64SQRTF,
		ssa.OpLOONG64REVB2H,
		ssa.OpLOONG64REVB2W,
		ssa.OpLOONG64REVBV,
		ssa.OpLOONG64BITREV4B,
		ssa.OpLOONG64BITREVW,
		ssa.OpLOONG64BITREVV,
		ssa.OpLOONG64ABSD:
		// Generic unary op: INSN Rarg0, Rout.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64VPCNT64,
		ssa.OpLOONG64VPCNT32,
		ssa.OpLOONG64VPCNT16:
		// Re-number the allocated F register as the corresponding LSX V
		// register by offset (Vn presumably aliases Fn — the conversion
		// below assumes a fixed F0->V0 correspondence).
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = ((v.Args[0].Reg() - loong64.REG_F0) & 31) + loong64.REG_V0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = ((v.Reg() - loong64.REG_F0) & 31) + loong64.REG_V0

	case ssa.OpLOONG64NEGV:
		// SUB from REGZERO
		p := s.Prog(loong64.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpLOONG64DUFFZERO:
		// runtime.duffzero expects start address in R20
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpLOONG64LoweredZero:
		// Emit a store-increment-compare loop:
		// MOVx	R0, (Rarg0)
		// ADDV	$sz, Rarg0
		// BGEU	Rarg1, Rarg0, -2(PC)
		mov, sz := largestMove(v.AuxInt)
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		p2 := s.Prog(loong64.AADDVU)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(loong64.ABGEU)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[1].Reg()
		p3.Reg = v.Args[0].Reg()
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

	case ssa.OpLOONG64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpLOONG64LoweredMove:
		// Emit a load-store-increment-compare loop through REGTMP:
		// MOVx	(Rarg1), Rtmp
		// MOVx	Rtmp, (Rarg0)
		// ADDV	$sz, Rarg1
		// ADDV	$sz, Rarg0
		// BGEU	Rarg2, Rarg0, -4(PC)
		mov, sz := largestMove(v.AuxInt)
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REGTMP

		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = loong64.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(loong64.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Args[1].Reg()

		p4 := s.Prog(loong64.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Args[0].Reg()

		p5 := s.Prog(loong64.ABGEU)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Args[2].Reg()
		p5.Reg = v.Args[1].Reg()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p)

	case ssa.OpLOONG64CALLstatic, ssa.OpLOONG64CALLclosure, ssa.OpLOONG64CALLinter:
		s.Call(v)
	case ssa.OpLOONG64CALLtail:
		s.TailCall(v)
	case ssa.OpLOONG64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpLOONG64LoweredPubBarrier:
		// DBAR 0x1A
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0x1A

	case ssa.OpLOONG64LoweredPanicBoundsA, ssa.OpLOONG64LoweredPanicBoundsB, ssa.OpLOONG64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpLOONG64LoweredAtomicLoad8, ssa.OpLOONG64LoweredAtomicLoad32, ssa.OpLOONG64LoweredAtomicLoad64:
		// MOVx	(Rarg0), Rout
		// DBAR	0x14
		as := loong64.AMOVV
		switch v.Op {
		case ssa.OpLOONG64LoweredAtomicLoad8:
			as = loong64.AMOVB
		case ssa.OpLOONG64LoweredAtomicLoad32:
			as = loong64.AMOVW
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(loong64.ADBAR)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = 0x14

	case ssa.OpLOONG64LoweredAtomicStore8,
		ssa.OpLOONG64LoweredAtomicStore32,
		ssa.OpLOONG64LoweredAtomicStore64:
		// DBAR 0x12
		// MOVx Rarg1, (Rarg0)
		// DBAR 0x18
		movx := loong64.AMOVV
		switch v.Op {
		case ssa.OpLOONG64LoweredAtomicStore8:
			movx = loong64.AMOVB
		case ssa.OpLOONG64LoweredAtomicStore32:
			movx = loong64.AMOVW
		}
		p := s.Prog(loong64.ADBAR)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0x12

		p1 := s.Prog(movx)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = v.Args[0].Reg()

		p2 := s.Prog(loong64.ADBAR)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = 0x18

	case ssa.OpLOONG64LoweredAtomicStore8Variant,
		ssa.OpLOONG64LoweredAtomicStore32Variant,
		ssa.OpLOONG64LoweredAtomicStore64Variant:
		//AMSWAPx  Rarg1, (Rarg0), Rout
		amswapx := loong64.AAMSWAPDBV
		switch v.Op {
		case ssa.OpLOONG64LoweredAtomicStore32Variant:
			amswapx = loong64.AAMSWAPDBW
		case ssa.OpLOONG64LoweredAtomicStore8Variant:
			amswapx = loong64.AAMSWAPDBB
		}
		p := s.Prog(amswapx)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		// Old value is discarded into the zero register.
		p.RegTo2 = loong64.REGZERO

	case ssa.OpLOONG64LoweredAtomicExchange32, ssa.OpLOONG64LoweredAtomicExchange64:
		// AMSWAPx	Rarg1, (Rarg0), Rout
		amswapx := loong64.AAMSWAPDBV
		if v.Op == ssa.OpLOONG64LoweredAtomicExchange32 {
			amswapx = loong64.AAMSWAPDBW
		}
		p := s.Prog(amswapx)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpLOONG64LoweredAtomicExchange8Variant:
		// AMSWAPDBB	Rarg1, (Rarg0), Rout
		p := s.Prog(loong64.AAMSWAPDBB)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpLOONG64LoweredAtomicAdd32, ssa.OpLOONG64LoweredAtomicAdd64:
		// AMADDx  Rarg1, (Rarg0), Rout
		// ADDV    Rarg1, Rout, Rout
		// The AM op returns the OLD memory value; the trailing ADD turns it
		// into the new value, which is what the SSA op yields.
		amaddx := loong64.AAMADDDBV
		addx := loong64.AADDV
		if v.Op == ssa.OpLOONG64LoweredAtomicAdd32 {
			amaddx = loong64.AAMADDDBW
		}
		p := s.Prog(amaddx)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

		p1 := s.Prog(addx)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()

	case ssa.OpLOONG64LoweredAtomicCas32, ssa.OpLOONG64LoweredAtomicCas64:
		// LL/SC-based compare-and-swap loop:
		// MOVV $0, Rout
		// DBAR 0x14
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// DBAR 0x12
		ll := loong64.ALLV
		sc := loong64.ASCV
		if v.Op == ssa.OpLOONG64LoweredAtomicCas32 {
			ll = loong64.ALL
			sc = loong64.ASC
		}

		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(loong64.ADBAR)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = 0x14

		p2 := s.Prog(ll)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = v.Args[0].Reg()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = loong64.REGTMP

		p3 := s.Prog(loong64.ABNE)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[1].Reg()
		p3.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH

		p4 := s.Prog(loong64.AMOVV)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[2].Reg()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

		p5 := s.Prog(sc)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_MEM
		p5.To.Reg = v.Args[0].Reg()

		p6 := s.Prog(loong64.ABEQ)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Reg0()
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)

		p7 := s.Prog(loong64.ADBAR)
		p7.From.Type = obj.TYPE_CONST
		p7.From.Offset = 0x12
		p3.To.SetTarget(p7)

	case ssa.OpLOONG64LoweredAtomicAnd32,
		ssa.OpLOONG64LoweredAtomicOr32:
		// AM{AND,OR}DBx  Rarg1, (Rarg0), RegZero
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = loong64.REGZERO

	case ssa.OpLOONG64LoweredAtomicAnd32value,
		ssa.OpLOONG64LoweredAtomicAnd64value,
		ssa.OpLOONG64LoweredAtomicOr64value,
		ssa.OpLOONG64LoweredAtomicOr32value:
		// AM{AND,OR}DBx  Rarg1, (Rarg0), Rout
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpLOONG64LoweredAtomicCas64Variant, ssa.OpLOONG64LoweredAtomicCas32Variant:
		// Hardware AMCAS-based compare-and-swap:
		// MOVV         $0, Rout
		// MOVV         Rarg1, Rtmp
		// AMCASDBx     Rarg2, (Rarg0), Rtmp
		// BNE          Rarg1, Rtmp, 2(PC)
		// MOVV         $1, Rout
		// NOP

		amcasx := loong64.AAMCASDBV
		if v.Op == ssa.OpLOONG64LoweredAtomicCas32Variant {
			amcasx = loong64.AAMCASDBW
		}

		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(loong64.AMOVV)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = loong64.REGTMP

		p2 := s.Prog(amcasx)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[2].Reg()
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p2.RegTo2 = loong64.REGTMP

		p3 := s.Prog(loong64.ABNE)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[1].Reg()
		p3.Reg = loong64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH

		p4 := s.Prog(loong64.AMOVV)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = 0x1
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

		p5 := s.Prog(obj.ANOP)
		p3.To.SetTarget(p5)

	case ssa.OpLOONG64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(loong64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = loong64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpLOONG64FPFlagTrue,
		ssa.OpLOONG64FPFlagFalse:
		// Materialize the FP condition flag as 0/1 in a GP register:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := loong64.ABFPF
		if v.Op == ssa.OpLOONG64FPFlagFalse {
			branch = loong64.ABFPT
		}
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = loong64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(loong64.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		p2.To.SetTarget(p4)
	case ssa.OpLOONG64LoweredGetClosurePtr:
		// Closure pointer is R22 (loong64.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpLOONG64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(loong64.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpLOONG64MASKEQZ, ssa.OpLOONG64MASKNEZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}
   957  
// blockJump maps each LOONG64 conditional block kind to its branch
// instructions: asm branches to Succs[0] when the condition holds, and
// invasm is the inverted branch used when Succs[0] is the fallthrough
// successor instead.
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockLOONG64EQ:   {loong64.ABEQ, loong64.ABNE},
	ssa.BlockLOONG64NE:   {loong64.ABNE, loong64.ABEQ},
	ssa.BlockLOONG64LTZ:  {loong64.ABLTZ, loong64.ABGEZ},
	ssa.BlockLOONG64GEZ:  {loong64.ABGEZ, loong64.ABLTZ},
	ssa.BlockLOONG64LEZ:  {loong64.ABLEZ, loong64.ABGTZ},
	ssa.BlockLOONG64GTZ:  {loong64.ABGTZ, loong64.ABLEZ},
	ssa.BlockLOONG64FPT:  {loong64.ABFPT, loong64.ABFPF},
	ssa.BlockLOONG64FPF:  {loong64.ABFPF, loong64.ABFPT},
	ssa.BlockLOONG64BEQ:  {loong64.ABEQ, loong64.ABNE},
	ssa.BlockLOONG64BNE:  {loong64.ABNE, loong64.ABEQ},
	ssa.BlockLOONG64BGE:  {loong64.ABGE, loong64.ABLT},
	ssa.BlockLOONG64BLT:  {loong64.ABLT, loong64.ABGE},
	ssa.BlockLOONG64BLTU: {loong64.ABLTU, loong64.ABGEU},
	ssa.BlockLOONG64BGEU: {loong64.ABGEU, loong64.ABLTU},
}
   976  
   977  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   978  	switch b.Kind {
   979  	case ssa.BlockPlain, ssa.BlockDefer:
   980  		if b.Succs[0].Block() != next {
   981  			p := s.Prog(obj.AJMP)
   982  			p.To.Type = obj.TYPE_BRANCH
   983  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   984  		}
   985  	case ssa.BlockExit, ssa.BlockRetJmp:
   986  	case ssa.BlockRet:
   987  		s.Prog(obj.ARET)
   988  	case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
   989  		ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
   990  		ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
   991  		ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
   992  		ssa.BlockLOONG64BLT, ssa.BlockLOONG64BGE,
   993  		ssa.BlockLOONG64BLTU, ssa.BlockLOONG64BGEU,
   994  		ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
   995  		jmp := blockJump[b.Kind]
   996  		var p *obj.Prog
   997  		switch next {
   998  		case b.Succs[0].Block():
   999  			p = s.Br(jmp.invasm, b.Succs[1].Block())
  1000  		case b.Succs[1].Block():
  1001  			p = s.Br(jmp.asm, b.Succs[0].Block())
  1002  		default:
  1003  			if b.Likely != ssa.BranchUnlikely {
  1004  				p = s.Br(jmp.asm, b.Succs[0].Block())
  1005  				s.Br(obj.AJMP, b.Succs[1].Block())
  1006  			} else {
  1007  				p = s.Br(jmp.invasm, b.Succs[1].Block())
  1008  				s.Br(obj.AJMP, b.Succs[0].Block())
  1009  			}
  1010  		}
  1011  		switch b.Kind {
  1012  		case ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
  1013  			ssa.BlockLOONG64BGE, ssa.BlockLOONG64BLT,
  1014  			ssa.BlockLOONG64BGEU, ssa.BlockLOONG64BLTU:
  1015  			p.From.Type = obj.TYPE_REG
  1016  			p.From.Reg = b.Controls[0].Reg()
  1017  			p.Reg = b.Controls[1].Reg()
  1018  		case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
  1019  			ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
  1020  			ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
  1021  			ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
  1022  			if !b.Controls[0].Type.IsFlags() {
  1023  				p.From.Type = obj.TYPE_REG
  1024  				p.From.Reg = b.Controls[0].Reg()
  1025  			}
  1026  		}
  1027  	default:
  1028  		b.Fatalf("branch not implemented: %s", b.LongString())
  1029  	}
  1030  }
  1031  
  1032  func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
  1033  	p := s.Prog(loadByType(t, reg))
  1034  	p.From.Type = obj.TYPE_MEM
  1035  	p.From.Name = obj.NAME_AUTO
  1036  	p.From.Sym = n.Linksym()
  1037  	p.From.Offset = n.FrameOffset() + off
  1038  	p.To.Type = obj.TYPE_REG
  1039  	p.To.Reg = reg
  1040  	return p
  1041  }
  1042  
  1043  func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
  1044  	p = pp.Append(p, storeByType(t, reg), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
  1045  	p.To.Name = obj.NAME_PARAM
  1046  	p.To.Sym = n.Linksym()
  1047  	p.Pos = p.Pos.WithNotStmt()
  1048  	return p
  1049  }
  1050  

View as plain text