Source file src/cmd/compile/internal/loong64/ssa.go

     1  // Copyright 2022 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package loong64
     6  
     7  import (
     8  	"math"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/logopt"
    13  	"cmd/compile/internal/objw"
    14  	"cmd/compile/internal/ssa"
    15  	"cmd/compile/internal/ssagen"
    16  	"cmd/compile/internal/types"
    17  	"cmd/internal/obj"
    18  	"cmd/internal/obj/loong64"
    19  )
    20  
    21  // isFPreg reports whether r is an FP register.
    22  func isFPreg(r int16) bool {
    23  	return loong64.REG_F0 <= r && r <= loong64.REG_F31
    24  }
    25  
    26  // loadByType returns the load instruction of the given type.
    27  func loadByType(t *types.Type, r int16) obj.As {
    28  	if isFPreg(r) {
    29  		if t.Size() == 4 {
    30  			return loong64.AMOVF
    31  		} else {
    32  			return loong64.AMOVD
    33  		}
    34  	} else {
    35  		switch t.Size() {
    36  		case 1:
    37  			if t.IsSigned() {
    38  				return loong64.AMOVB
    39  			} else {
    40  				return loong64.AMOVBU
    41  			}
    42  		case 2:
    43  			if t.IsSigned() {
    44  				return loong64.AMOVH
    45  			} else {
    46  				return loong64.AMOVHU
    47  			}
    48  		case 4:
    49  			if t.IsSigned() {
    50  				return loong64.AMOVW
    51  			} else {
    52  				return loong64.AMOVWU
    53  			}
    54  		case 8:
    55  			return loong64.AMOVV
    56  		}
    57  	}
    58  	panic("bad load type")
    59  }
    60  
    61  // storeByType returns the store instruction of the given type.
    62  func storeByType(t *types.Type, r int16) obj.As {
    63  	if isFPreg(r) {
    64  		if t.Size() == 4 {
    65  			return loong64.AMOVF
    66  		} else {
    67  			return loong64.AMOVD
    68  		}
    69  	} else {
    70  		switch t.Size() {
    71  		case 1:
    72  			return loong64.AMOVB
    73  		case 2:
    74  			return loong64.AMOVH
    75  		case 4:
    76  			return loong64.AMOVW
    77  		case 8:
    78  			return loong64.AMOVV
    79  		}
    80  	}
    81  	panic("bad store type")
    82  }
    83  
    84  // largestMove returns the largest move instruction possible and its size,
    85  // given the alignment of the total size of the move.
    86  //
    87  // e.g., a 16-byte move may use MOVV, but an 11-byte move must use MOVB.
    88  //
    89  // Note that the moves may not be on naturally aligned addresses depending on
    90  // the source and destination.
    91  //
    92  // This matches the calculation in ssa.moveSize.
    93  func largestMove(alignment int64) (obj.As, int64) {
    94  	switch {
    95  	case alignment%8 == 0:
    96  		return loong64.AMOVV, 8
    97  	case alignment%4 == 0:
    98  		return loong64.AMOVW, 4
    99  	case alignment%2 == 0:
   100  		return loong64.AMOVH, 2
   101  	default:
   102  		return loong64.AMOVB, 1
   103  	}
   104  }
   105  
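         // ssaGenValue emits the machine instructions for the single SSA value v,
         // appending them to the current function via s.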
   106  func ssaGenValue(s *ssagen.State, v *ssa.Value) {
   107  	switch v.Op {
   108  	case ssa.OpCopy, ssa.OpLOONG64MOVVreg:
   109  		if v.Type.IsMemory() {
   110  			return
   111  		}
   112  		x := v.Args[0].Reg()
   113  		y := v.Reg()
   114  		if x == y {
   115  			return
   116  		}
   117  		as := loong64.AMOVV
   118  		if isFPreg(x) && isFPreg(y) {
   119  			as = loong64.AMOVD
   120  		}
   121  		p := s.Prog(as)
   122  		p.From.Type = obj.TYPE_REG
   123  		p.From.Reg = x
   124  		p.To.Type = obj.TYPE_REG
   125  		p.To.Reg = y
   126  	case ssa.OpLOONG64MOVVnop,
   127  		ssa.OpLOONG64LoweredRound32F,
   128  		ssa.OpLOONG64LoweredRound64F:
   129  		// nothing to do
   130  	case ssa.OpLoadReg:
   131  		if v.Type.IsFlags() {
   132  			v.Fatalf("load flags not implemented: %v", v.LongString())
   133  			return
   134  		}
   135  		r := v.Reg()
   136  		p := s.Prog(loadByType(v.Type, r))
   137  		ssagen.AddrAuto(&p.From, v.Args[0])
   138  		p.To.Type = obj.TYPE_REG
   139  		p.To.Reg = r
   140  	case ssa.OpStoreReg:
   141  		if v.Type.IsFlags() {
   142  			v.Fatalf("store flags not implemented: %v", v.LongString())
   143  			return
   144  		}
   145  		r := v.Args[0].Reg()
   146  		p := s.Prog(storeByType(v.Type, r))
   147  		p.From.Type = obj.TYPE_REG
   148  		p.From.Reg = r
   149  		ssagen.AddrAuto(&p.To, v)
   150  	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
    151  		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
    152  		// of the register args. The loop below runs only once, since RegArgs is cleared afterwards.
   153  		for _, a := range v.Block.Func.RegArgs {
   154  			// Pass the spill/unspill information along to the assembler, offset by size of
   155  			// the saved LR slot.
   156  			addr := ssagen.SpillSlotAddr(a, loong64.REGSP, base.Ctxt.Arch.FixedFrameSize)
   157  			s.FuncInfo().AddSpill(
   158  				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type, a.Reg), Spill: storeByType(a.Type, a.Reg)})
   159  		}
   160  		v.Block.Func.RegArgs = nil
   161  		ssagen.CheckArgReg(v)
   162  	case ssa.OpLOONG64ADDV,
   163  		ssa.OpLOONG64SUBV,
   164  		ssa.OpLOONG64AND,
   165  		ssa.OpLOONG64OR,
   166  		ssa.OpLOONG64XOR,
   167  		ssa.OpLOONG64NOR,
   168  		ssa.OpLOONG64SLLV,
   169  		ssa.OpLOONG64SRLV,
   170  		ssa.OpLOONG64SRAV,
   171  		ssa.OpLOONG64ROTR,
   172  		ssa.OpLOONG64ROTRV,
   173  		ssa.OpLOONG64ADDF,
   174  		ssa.OpLOONG64ADDD,
   175  		ssa.OpLOONG64SUBF,
   176  		ssa.OpLOONG64SUBD,
   177  		ssa.OpLOONG64MULF,
   178  		ssa.OpLOONG64MULD,
   179  		ssa.OpLOONG64DIVF,
   180  		ssa.OpLOONG64DIVD,
   181  		ssa.OpLOONG64MULV, ssa.OpLOONG64MULHV, ssa.OpLOONG64MULHVU,
   182  		ssa.OpLOONG64DIVV, ssa.OpLOONG64REMV, ssa.OpLOONG64DIVVU, ssa.OpLOONG64REMVU,
   183  		ssa.OpLOONG64FCOPYSGD:
   184  		p := s.Prog(v.Op.Asm())
   185  		p.From.Type = obj.TYPE_REG
   186  		p.From.Reg = v.Args[1].Reg()
   187  		p.Reg = v.Args[0].Reg()
   188  		p.To.Type = obj.TYPE_REG
   189  		p.To.Reg = v.Reg()
   190  
   191  	case ssa.OpLOONG64BSTRPICKV,
   192  		ssa.OpLOONG64BSTRPICKW:
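         		// AuxInt packs both bit-field bounds: the low 5 (W form) or 6 (V form)
         		// bits hold lsb and the remaining bits hold msb; they are unpacked here
         		// into the instruction's two constant operands.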
   193  		p := s.Prog(v.Op.Asm())
   194  		p.From.Type = obj.TYPE_CONST
   195  		if v.Op == ssa.OpLOONG64BSTRPICKW {
   196  			p.From.Offset = v.AuxInt >> 5
   197  			p.AddRestSourceConst(v.AuxInt & 0x1f)
   198  		} else {
   199  			p.From.Offset = v.AuxInt >> 6
   200  			p.AddRestSourceConst(v.AuxInt & 0x3f)
   201  		}
   202  		p.Reg = v.Args[0].Reg()
   203  		p.To.Type = obj.TYPE_REG
   204  		p.To.Reg = v.Reg()
   205  
   206  	case ssa.OpLOONG64FMINF,
   207  		ssa.OpLOONG64FMIND,
   208  		ssa.OpLOONG64FMAXF,
   209  		ssa.OpLOONG64FMAXD:
   210  		// ADDD Rarg0, Rarg1, Rout
   211  		// CMPEQD Rarg0, Rarg0, FCC0
   212  		// bceqz FCC0, end
   213  		// CMPEQD Rarg1, Rarg1, FCC0
   214  		// bceqz FCC0, end
    215  		// F(MIN|MAX)(F|D)	Rarg1, Rarg0, Rout
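         		// If either argument is NaN, the ADD above has already left a NaN in
         		// Rout and the branches skip the F(MIN|MAX) instruction, so the NaN
         		// propagates to the result; otherwise Rout is overwritten below.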
   216  
   217  		r0 := v.Args[0].Reg()
   218  		r1 := v.Args[1].Reg()
   219  		out := v.Reg()
   220  		add, fcmp := loong64.AADDD, loong64.ACMPEQD
   221  		if v.Op == ssa.OpLOONG64FMINF || v.Op == ssa.OpLOONG64FMAXF {
   222  			add = loong64.AADDF
   223  			fcmp = loong64.ACMPEQF
   224  		}
   225  		p1 := s.Prog(add)
   226  		p1.From.Type = obj.TYPE_REG
   227  		p1.From.Reg = r0
   228  		p1.Reg = r1
   229  		p1.To.Type = obj.TYPE_REG
   230  		p1.To.Reg = out
   231  
   232  		p2 := s.Prog(fcmp)
   233  		p2.From.Type = obj.TYPE_REG
   234  		p2.From.Reg = r0
   235  		p2.Reg = r0
   236  		p2.To.Type = obj.TYPE_REG
   237  		p2.To.Reg = loong64.REG_FCC0
   238  
   239  		p3 := s.Prog(loong64.ABFPF)
   240  		p3.To.Type = obj.TYPE_BRANCH
   241  
   242  		p4 := s.Prog(fcmp)
   243  		p4.From.Type = obj.TYPE_REG
   244  		p4.From.Reg = r1
   245  		p4.Reg = r1
   246  		p4.To.Type = obj.TYPE_REG
   247  		p4.To.Reg = loong64.REG_FCC0
   248  
   249  		p5 := s.Prog(loong64.ABFPF)
   250  		p5.To.Type = obj.TYPE_BRANCH
   251  
   252  		p6 := s.Prog(v.Op.Asm())
   253  		p6.From.Type = obj.TYPE_REG
   254  		p6.From.Reg = r1
   255  		p6.Reg = r0
   256  		p6.To.Type = obj.TYPE_REG
   257  		p6.To.Reg = out
   258  
   259  		nop := s.Prog(obj.ANOP)
   260  		p3.To.SetTarget(nop)
   261  		p5.To.SetTarget(nop)
   262  
   263  	case ssa.OpLOONG64SGT,
   264  		ssa.OpLOONG64SGTU:
   265  		p := s.Prog(v.Op.Asm())
   266  		p.From.Type = obj.TYPE_REG
   267  		p.From.Reg = v.Args[0].Reg()
   268  		p.Reg = v.Args[1].Reg()
   269  		p.To.Type = obj.TYPE_REG
   270  		p.To.Reg = v.Reg()
   271  	case ssa.OpLOONG64ADDVconst,
   272  		ssa.OpLOONG64SUBVconst,
   273  		ssa.OpLOONG64ANDconst,
   274  		ssa.OpLOONG64ORconst,
   275  		ssa.OpLOONG64XORconst,
   276  		ssa.OpLOONG64NORconst,
   277  		ssa.OpLOONG64SLLVconst,
   278  		ssa.OpLOONG64SRLVconst,
   279  		ssa.OpLOONG64SRAVconst,
   280  		ssa.OpLOONG64ROTRconst,
   281  		ssa.OpLOONG64ROTRVconst,
   282  		ssa.OpLOONG64SGTconst,
   283  		ssa.OpLOONG64SGTUconst:
   284  		p := s.Prog(v.Op.Asm())
   285  		p.From.Type = obj.TYPE_CONST
   286  		p.From.Offset = v.AuxInt
   287  		p.Reg = v.Args[0].Reg()
   288  		p.To.Type = obj.TYPE_REG
   289  		p.To.Reg = v.Reg()
   290  	case ssa.OpLOONG64MOVVconst:
   291  		r := v.Reg()
   292  		p := s.Prog(v.Op.Asm())
   293  		p.From.Type = obj.TYPE_CONST
   294  		p.From.Offset = v.AuxInt
   295  		p.To.Type = obj.TYPE_REG
   296  		p.To.Reg = r
   297  		if isFPreg(r) {
   298  			// cannot move into FP or special registers, use TMP as intermediate
   299  			p.To.Reg = loong64.REGTMP
   300  			p = s.Prog(loong64.AMOVV)
   301  			p.From.Type = obj.TYPE_REG
   302  			p.From.Reg = loong64.REGTMP
   303  			p.To.Type = obj.TYPE_REG
   304  			p.To.Reg = r
   305  		}
   306  	case ssa.OpLOONG64MOVFconst,
   307  		ssa.OpLOONG64MOVDconst:
   308  		p := s.Prog(v.Op.Asm())
   309  		p.From.Type = obj.TYPE_FCONST
   310  		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
   311  		p.To.Type = obj.TYPE_REG
   312  		p.To.Reg = v.Reg()
   313  	case ssa.OpLOONG64CMPEQF,
   314  		ssa.OpLOONG64CMPEQD,
   315  		ssa.OpLOONG64CMPGEF,
   316  		ssa.OpLOONG64CMPGED,
   317  		ssa.OpLOONG64CMPGTF,
   318  		ssa.OpLOONG64CMPGTD:
   319  		p := s.Prog(v.Op.Asm())
   320  		p.From.Type = obj.TYPE_REG
   321  		p.From.Reg = v.Args[0].Reg()
   322  		p.Reg = v.Args[1].Reg()
   323  		p.To.Type = obj.TYPE_REG
   324  		p.To.Reg = loong64.REG_FCC0
   325  
   326  	case ssa.OpLOONG64FMADDF,
   327  		ssa.OpLOONG64FMADDD,
   328  		ssa.OpLOONG64FMSUBF,
   329  		ssa.OpLOONG64FMSUBD,
   330  		ssa.OpLOONG64FNMADDF,
   331  		ssa.OpLOONG64FNMADDD,
   332  		ssa.OpLOONG64FNMSUBF,
   333  		ssa.OpLOONG64FNMSUBD:
   334  		p := s.Prog(v.Op.Asm())
   335  		// r=(FMA x y z) -> FMADDD z, y, x, r
    336  		// The SSA operand order takes advantage of commutativity,
    337  		// which applies only to the first two operands.
   338  		r := v.Reg()
   339  		x := v.Args[0].Reg()
   340  		y := v.Args[1].Reg()
   341  		z := v.Args[2].Reg()
   342  		p.From.Type = obj.TYPE_REG
   343  		p.From.Reg = z
   344  		p.Reg = y
   345  		p.AddRestSourceReg(x)
   346  		p.To.Type = obj.TYPE_REG
   347  		p.To.Reg = r
   348  
   349  	case ssa.OpLOONG64MOVVaddr:
   350  		p := s.Prog(loong64.AMOVV)
   351  		p.From.Type = obj.TYPE_ADDR
   352  		p.From.Reg = v.Args[0].Reg()
   353  		var wantreg string
    354  		// MOVV $sym+off(base), R
    355  		// The assembler expands it as follows:
    356  		// - base is SP: add the constant offset to SP (R3);
    357  		//   when the constant is large, the tmp register (R30) may be used
    358  		// - base is SB: load the external address with a relocation
   359  		switch v.Aux.(type) {
   360  		default:
   361  			v.Fatalf("aux is of unknown type %T", v.Aux)
   362  		case *obj.LSym:
   363  			wantreg = "SB"
   364  			ssagen.AddAux(&p.From, v)
   365  		case *ir.Name:
   366  			wantreg = "SP"
   367  			ssagen.AddAux(&p.From, v)
   368  		case nil:
   369  			// No sym, just MOVV $off(SP), R
   370  			wantreg = "SP"
   371  			p.From.Offset = v.AuxInt
   372  		}
   373  		if reg := v.Args[0].RegName(); reg != wantreg {
   374  			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
   375  		}
   376  		p.To.Type = obj.TYPE_REG
   377  		p.To.Reg = v.Reg()
   378  
   379  	case ssa.OpLOONG64MOVBloadidx,
   380  		ssa.OpLOONG64MOVBUloadidx,
   381  		ssa.OpLOONG64MOVHloadidx,
   382  		ssa.OpLOONG64MOVHUloadidx,
   383  		ssa.OpLOONG64MOVWloadidx,
   384  		ssa.OpLOONG64MOVWUloadidx,
   385  		ssa.OpLOONG64MOVVloadidx,
   386  		ssa.OpLOONG64MOVFloadidx,
   387  		ssa.OpLOONG64MOVDloadidx:
   388  		p := s.Prog(v.Op.Asm())
   389  		p.From.Type = obj.TYPE_MEM
   390  		p.From.Name = obj.NAME_NONE
   391  		p.From.Reg = v.Args[0].Reg()
   392  		p.From.Index = v.Args[1].Reg()
   393  		p.To.Type = obj.TYPE_REG
   394  		p.To.Reg = v.Reg()
   395  
   396  	case ssa.OpLOONG64MOVBstoreidx,
   397  		ssa.OpLOONG64MOVHstoreidx,
   398  		ssa.OpLOONG64MOVWstoreidx,
   399  		ssa.OpLOONG64MOVVstoreidx,
   400  		ssa.OpLOONG64MOVFstoreidx,
   401  		ssa.OpLOONG64MOVDstoreidx:
   402  		p := s.Prog(v.Op.Asm())
   403  		p.From.Type = obj.TYPE_REG
   404  		p.From.Reg = v.Args[2].Reg()
   405  		p.To.Type = obj.TYPE_MEM
   406  		p.To.Name = obj.NAME_NONE
   407  		p.To.Reg = v.Args[0].Reg()
   408  		p.To.Index = v.Args[1].Reg()
   409  
   410  	case ssa.OpLOONG64MOVBstorezeroidx,
   411  		ssa.OpLOONG64MOVHstorezeroidx,
   412  		ssa.OpLOONG64MOVWstorezeroidx,
   413  		ssa.OpLOONG64MOVVstorezeroidx:
   414  		p := s.Prog(v.Op.Asm())
   415  		p.From.Type = obj.TYPE_REG
   416  		p.From.Reg = loong64.REGZERO
   417  		p.To.Type = obj.TYPE_MEM
   418  		p.To.Name = obj.NAME_NONE
   419  		p.To.Reg = v.Args[0].Reg()
   420  		p.To.Index = v.Args[1].Reg()
   421  
   422  	case ssa.OpLOONG64MOVBload,
   423  		ssa.OpLOONG64MOVBUload,
   424  		ssa.OpLOONG64MOVHload,
   425  		ssa.OpLOONG64MOVHUload,
   426  		ssa.OpLOONG64MOVWload,
   427  		ssa.OpLOONG64MOVWUload,
   428  		ssa.OpLOONG64MOVVload,
   429  		ssa.OpLOONG64MOVFload,
   430  		ssa.OpLOONG64MOVDload:
   431  		p := s.Prog(v.Op.Asm())
   432  		p.From.Type = obj.TYPE_MEM
   433  		p.From.Reg = v.Args[0].Reg()
   434  		ssagen.AddAux(&p.From, v)
   435  		p.To.Type = obj.TYPE_REG
   436  		p.To.Reg = v.Reg()
   437  	case ssa.OpLOONG64MOVBstore,
   438  		ssa.OpLOONG64MOVHstore,
   439  		ssa.OpLOONG64MOVWstore,
   440  		ssa.OpLOONG64MOVVstore,
   441  		ssa.OpLOONG64MOVFstore,
   442  		ssa.OpLOONG64MOVDstore:
   443  		p := s.Prog(v.Op.Asm())
   444  		p.From.Type = obj.TYPE_REG
   445  		p.From.Reg = v.Args[1].Reg()
   446  		p.To.Type = obj.TYPE_MEM
   447  		p.To.Reg = v.Args[0].Reg()
   448  		ssagen.AddAux(&p.To, v)
   449  	case ssa.OpLOONG64MOVBstorezero,
   450  		ssa.OpLOONG64MOVHstorezero,
   451  		ssa.OpLOONG64MOVWstorezero,
   452  		ssa.OpLOONG64MOVVstorezero:
   453  		p := s.Prog(v.Op.Asm())
   454  		p.From.Type = obj.TYPE_REG
   455  		p.From.Reg = loong64.REGZERO
   456  		p.To.Type = obj.TYPE_MEM
   457  		p.To.Reg = v.Args[0].Reg()
   458  		ssagen.AddAux(&p.To, v)
   459  	case ssa.OpLOONG64MOVBreg,
   460  		ssa.OpLOONG64MOVBUreg,
   461  		ssa.OpLOONG64MOVHreg,
   462  		ssa.OpLOONG64MOVHUreg,
   463  		ssa.OpLOONG64MOVWreg,
   464  		ssa.OpLOONG64MOVWUreg:
   465  		a := v.Args[0]
   466  		for a.Op == ssa.OpCopy || a.Op == ssa.OpLOONG64MOVVreg {
   467  			a = a.Args[0]
   468  		}
   469  		if a.Op == ssa.OpLoadReg && loong64.REG_R0 <= a.Reg() && a.Reg() <= loong64.REG_R31 {
   470  			// LoadReg from a narrower type does an extension, except loading
   471  			// to a floating point register. So only eliminate the extension
   472  			// if it is loaded to an integer register.
   473  
   474  			t := a.Type
   475  			switch {
   476  			case v.Op == ssa.OpLOONG64MOVBreg && t.Size() == 1 && t.IsSigned(),
   477  				v.Op == ssa.OpLOONG64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
   478  				v.Op == ssa.OpLOONG64MOVHreg && t.Size() == 2 && t.IsSigned(),
   479  				v.Op == ssa.OpLOONG64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
   480  				v.Op == ssa.OpLOONG64MOVWreg && t.Size() == 4 && t.IsSigned(),
   481  				v.Op == ssa.OpLOONG64MOVWUreg && t.Size() == 4 && !t.IsSigned():
   482  				// arg is a proper-typed load, already zero/sign-extended, don't extend again
   483  				if v.Reg() == v.Args[0].Reg() {
   484  					return
   485  				}
   486  				p := s.Prog(loong64.AMOVV)
   487  				p.From.Type = obj.TYPE_REG
   488  				p.From.Reg = v.Args[0].Reg()
   489  				p.To.Type = obj.TYPE_REG
   490  				p.To.Reg = v.Reg()
   491  				return
   492  			default:
   493  			}
   494  		}
   495  		fallthrough
   496  
   497  	case ssa.OpLOONG64MOVWF,
   498  		ssa.OpLOONG64MOVWD,
   499  		ssa.OpLOONG64TRUNCFW,
   500  		ssa.OpLOONG64TRUNCDW,
   501  		ssa.OpLOONG64MOVVF,
   502  		ssa.OpLOONG64MOVVD,
   503  		ssa.OpLOONG64TRUNCFV,
   504  		ssa.OpLOONG64TRUNCDV,
   505  		ssa.OpLOONG64MOVFD,
   506  		ssa.OpLOONG64MOVDF,
   507  		ssa.OpLOONG64MOVWfpgp,
   508  		ssa.OpLOONG64MOVWgpfp,
   509  		ssa.OpLOONG64MOVVfpgp,
   510  		ssa.OpLOONG64MOVVgpfp,
   511  		ssa.OpLOONG64NEGF,
   512  		ssa.OpLOONG64NEGD,
   513  		ssa.OpLOONG64CLZW,
   514  		ssa.OpLOONG64CLZV,
   515  		ssa.OpLOONG64CTZW,
   516  		ssa.OpLOONG64CTZV,
   517  		ssa.OpLOONG64SQRTD,
   518  		ssa.OpLOONG64SQRTF,
   519  		ssa.OpLOONG64REVB2H,
   520  		ssa.OpLOONG64REVB2W,
   521  		ssa.OpLOONG64REVBV,
   522  		ssa.OpLOONG64BITREV4B,
   523  		ssa.OpLOONG64BITREVW,
   524  		ssa.OpLOONG64BITREVV,
   525  		ssa.OpLOONG64ABSD:
   526  		p := s.Prog(v.Op.Asm())
   527  		p.From.Type = obj.TYPE_REG
   528  		p.From.Reg = v.Args[0].Reg()
   529  		p.To.Type = obj.TYPE_REG
   530  		p.To.Reg = v.Reg()
   531  
   532  	case ssa.OpLOONG64VPCNT64,
   533  		ssa.OpLOONG64VPCNT32,
   534  		ssa.OpLOONG64VPCNT16:
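         		// VPCNT operates on the LSX vector registers, which overlap the
         		// floating-point registers, so remap the F register numbers to the
         		// corresponding V registers.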
   535  		p := s.Prog(v.Op.Asm())
   536  		p.From.Type = obj.TYPE_REG
   537  		p.From.Reg = ((v.Args[0].Reg() - loong64.REG_F0) & 31) + loong64.REG_V0
   538  		p.To.Type = obj.TYPE_REG
   539  		p.To.Reg = ((v.Reg() - loong64.REG_F0) & 31) + loong64.REG_V0
   540  
   541  	case ssa.OpLOONG64NEGV:
    542  		// SUBVU from REGZERO: Rout = R0 - Rarg0
   543  		p := s.Prog(loong64.ASUBVU)
   544  		p.From.Type = obj.TYPE_REG
   545  		p.From.Reg = v.Args[0].Reg()
   546  		p.Reg = loong64.REGZERO
   547  		p.To.Type = obj.TYPE_REG
   548  		p.To.Reg = v.Reg()
   549  
   550  	case ssa.OpLOONG64DUFFZERO:
   551  		// runtime.duffzero expects start address in R20
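         		// v.AuxInt is the offset into duffzero at which execution starts,
         		// and thus determines how many bytes are zeroed.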
   552  		p := s.Prog(obj.ADUFFZERO)
   553  		p.To.Type = obj.TYPE_MEM
   554  		p.To.Name = obj.NAME_EXTERN
   555  		p.To.Sym = ir.Syms.Duffzero
   556  		p.To.Offset = v.AuxInt
   557  	case ssa.OpLOONG64LoweredZero:
   558  		// MOVx	R0, (Rarg0)
   559  		// ADDV	$sz, Rarg0
   560  		// BGEU	Rarg1, Rarg0, -2(PC)
   561  		mov, sz := largestMove(v.AuxInt)
   562  		p := s.Prog(mov)
   563  		p.From.Type = obj.TYPE_REG
   564  		p.From.Reg = loong64.REGZERO
   565  		p.To.Type = obj.TYPE_MEM
   566  		p.To.Reg = v.Args[0].Reg()
   567  
   568  		p2 := s.Prog(loong64.AADDVU)
   569  		p2.From.Type = obj.TYPE_CONST
   570  		p2.From.Offset = sz
   571  		p2.To.Type = obj.TYPE_REG
   572  		p2.To.Reg = v.Args[0].Reg()
   573  
   574  		p3 := s.Prog(loong64.ABGEU)
   575  		p3.From.Type = obj.TYPE_REG
   576  		p3.From.Reg = v.Args[1].Reg()
   577  		p3.Reg = v.Args[0].Reg()
   578  		p3.To.Type = obj.TYPE_BRANCH
   579  		p3.To.SetTarget(p)
   580  
   581  	case ssa.OpLOONG64DUFFCOPY:
   582  		p := s.Prog(obj.ADUFFCOPY)
   583  		p.To.Type = obj.TYPE_MEM
   584  		p.To.Name = obj.NAME_EXTERN
   585  		p.To.Sym = ir.Syms.Duffcopy
   586  		p.To.Offset = v.AuxInt
   587  	case ssa.OpLOONG64LoweredMove:
   588  		// MOVx	(Rarg1), Rtmp
   589  		// MOVx	Rtmp, (Rarg0)
   590  		// ADDV	$sz, Rarg1
   591  		// ADDV	$sz, Rarg0
   592  		// BGEU	Rarg2, Rarg0, -4(PC)
   593  		mov, sz := largestMove(v.AuxInt)
   594  		p := s.Prog(mov)
   595  		p.From.Type = obj.TYPE_MEM
   596  		p.From.Reg = v.Args[1].Reg()
   597  		p.To.Type = obj.TYPE_REG
   598  		p.To.Reg = loong64.REGTMP
   599  
   600  		p2 := s.Prog(mov)
   601  		p2.From.Type = obj.TYPE_REG
   602  		p2.From.Reg = loong64.REGTMP
   603  		p2.To.Type = obj.TYPE_MEM
   604  		p2.To.Reg = v.Args[0].Reg()
   605  
   606  		p3 := s.Prog(loong64.AADDVU)
   607  		p3.From.Type = obj.TYPE_CONST
   608  		p3.From.Offset = sz
   609  		p3.To.Type = obj.TYPE_REG
   610  		p3.To.Reg = v.Args[1].Reg()
   611  
   612  		p4 := s.Prog(loong64.AADDVU)
   613  		p4.From.Type = obj.TYPE_CONST
   614  		p4.From.Offset = sz
   615  		p4.To.Type = obj.TYPE_REG
   616  		p4.To.Reg = v.Args[0].Reg()
   617  
   618  		p5 := s.Prog(loong64.ABGEU)
   619  		p5.From.Type = obj.TYPE_REG
   620  		p5.From.Reg = v.Args[2].Reg()
   621  		p5.Reg = v.Args[1].Reg()
   622  		p5.To.Type = obj.TYPE_BRANCH
   623  		p5.To.SetTarget(p)
   624  
   625  	case ssa.OpLOONG64CALLstatic, ssa.OpLOONG64CALLclosure, ssa.OpLOONG64CALLinter:
   626  		s.Call(v)
   627  	case ssa.OpLOONG64CALLtail:
   628  		s.TailCall(v)
   629  	case ssa.OpLOONG64LoweredWB:
   630  		p := s.Prog(obj.ACALL)
   631  		p.To.Type = obj.TYPE_MEM
   632  		p.To.Name = obj.NAME_EXTERN
   633  		// AuxInt encodes how many buffer entries we need.
   634  		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
   635  
   636  	case ssa.OpLOONG64LoweredPubBarrier:
   637  		// DBAR 0x1A
   638  		p := s.Prog(v.Op.Asm())
   639  		p.From.Type = obj.TYPE_CONST
   640  		p.From.Offset = 0x1A
   641  
   642  	case ssa.OpLOONG64LoweredPanicBoundsA, ssa.OpLOONG64LoweredPanicBoundsB, ssa.OpLOONG64LoweredPanicBoundsC:
   643  		p := s.Prog(obj.ACALL)
   644  		p.To.Type = obj.TYPE_MEM
   645  		p.To.Name = obj.NAME_EXTERN
   646  		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
   647  		s.UseArgs(16) // space used in callee args area by assembly stubs
   648  	case ssa.OpLOONG64LoweredAtomicLoad8, ssa.OpLOONG64LoweredAtomicLoad32, ssa.OpLOONG64LoweredAtomicLoad64:
    649  		// MOVx	(Rarg0), Rout
   650  		// DBAR	0x14
   651  		as := loong64.AMOVV
   652  		switch v.Op {
   653  		case ssa.OpLOONG64LoweredAtomicLoad8:
   654  			as = loong64.AMOVB
   655  		case ssa.OpLOONG64LoweredAtomicLoad32:
   656  			as = loong64.AMOVW
   657  		}
   658  		p := s.Prog(as)
   659  		p.From.Type = obj.TYPE_MEM
   660  		p.From.Reg = v.Args[0].Reg()
   661  		p.To.Type = obj.TYPE_REG
   662  		p.To.Reg = v.Reg0()
   663  		p1 := s.Prog(loong64.ADBAR)
   664  		p1.From.Type = obj.TYPE_CONST
   665  		p1.From.Offset = 0x14
   666  
   667  	case ssa.OpLOONG64LoweredAtomicStore8,
   668  		ssa.OpLOONG64LoweredAtomicStore32,
   669  		ssa.OpLOONG64LoweredAtomicStore64:
   670  		// DBAR 0x12
    671  		// MOVx Rarg1, (Rarg0)
   672  		// DBAR 0x18
   673  		movx := loong64.AMOVV
   674  		switch v.Op {
   675  		case ssa.OpLOONG64LoweredAtomicStore8:
   676  			movx = loong64.AMOVB
   677  		case ssa.OpLOONG64LoweredAtomicStore32:
   678  			movx = loong64.AMOVW
   679  		}
   680  		p := s.Prog(loong64.ADBAR)
   681  		p.From.Type = obj.TYPE_CONST
   682  		p.From.Offset = 0x12
   683  
   684  		p1 := s.Prog(movx)
   685  		p1.From.Type = obj.TYPE_REG
   686  		p1.From.Reg = v.Args[1].Reg()
   687  		p1.To.Type = obj.TYPE_MEM
   688  		p1.To.Reg = v.Args[0].Reg()
   689  
   690  		p2 := s.Prog(loong64.ADBAR)
   691  		p2.From.Type = obj.TYPE_CONST
   692  		p2.From.Offset = 0x18
   693  
   694  	case ssa.OpLOONG64LoweredAtomicStore8Variant,
   695  		ssa.OpLOONG64LoweredAtomicStore32Variant,
   696  		ssa.OpLOONG64LoweredAtomicStore64Variant:
    697  		// AMSWAPx  Rarg1, (Rarg0), Rout
   698  		amswapx := loong64.AAMSWAPDBV
   699  		switch v.Op {
   700  		case ssa.OpLOONG64LoweredAtomicStore32Variant:
   701  			amswapx = loong64.AAMSWAPDBW
   702  		case ssa.OpLOONG64LoweredAtomicStore8Variant:
   703  			amswapx = loong64.AAMSWAPDBB
   704  		}
   705  		p := s.Prog(amswapx)
   706  		p.From.Type = obj.TYPE_REG
   707  		p.From.Reg = v.Args[1].Reg()
   708  		p.To.Type = obj.TYPE_MEM
   709  		p.To.Reg = v.Args[0].Reg()
   710  		p.RegTo2 = loong64.REGZERO
   711  
   712  	case ssa.OpLOONG64LoweredAtomicExchange32, ssa.OpLOONG64LoweredAtomicExchange64:
   713  		// AMSWAPx	Rarg1, (Rarg0), Rout
   714  		amswapx := loong64.AAMSWAPDBV
   715  		if v.Op == ssa.OpLOONG64LoweredAtomicExchange32 {
   716  			amswapx = loong64.AAMSWAPDBW
   717  		}
   718  		p := s.Prog(amswapx)
   719  		p.From.Type = obj.TYPE_REG
   720  		p.From.Reg = v.Args[1].Reg()
   721  		p.To.Type = obj.TYPE_MEM
   722  		p.To.Reg = v.Args[0].Reg()
   723  		p.RegTo2 = v.Reg0()
   724  
   725  	case ssa.OpLOONG64LoweredAtomicExchange8Variant:
   726  		// AMSWAPDBB	Rarg1, (Rarg0), Rout
   727  		p := s.Prog(loong64.AAMSWAPDBB)
   728  		p.From.Type = obj.TYPE_REG
   729  		p.From.Reg = v.Args[1].Reg()
   730  		p.To.Type = obj.TYPE_MEM
   731  		p.To.Reg = v.Args[0].Reg()
   732  		p.RegTo2 = v.Reg0()
   733  
   734  	case ssa.OpLOONG64LoweredAtomicAdd32, ssa.OpLOONG64LoweredAtomicAdd64:
   735  		// AMADDx  Rarg1, (Rarg0), Rout
   736  		// ADDV    Rarg1, Rout, Rout
   737  		amaddx := loong64.AAMADDDBV
   738  		addx := loong64.AADDV
   739  		if v.Op == ssa.OpLOONG64LoweredAtomicAdd32 {
   740  			amaddx = loong64.AAMADDDBW
   741  		}
   742  		p := s.Prog(amaddx)
   743  		p.From.Type = obj.TYPE_REG
   744  		p.From.Reg = v.Args[1].Reg()
   745  		p.To.Type = obj.TYPE_MEM
   746  		p.To.Reg = v.Args[0].Reg()
   747  		p.RegTo2 = v.Reg0()
   748  
   749  		p1 := s.Prog(addx)
   750  		p1.From.Type = obj.TYPE_REG
   751  		p1.From.Reg = v.Args[1].Reg()
   752  		p1.Reg = v.Reg0()
   753  		p1.To.Type = obj.TYPE_REG
   754  		p1.To.Reg = v.Reg0()
   755  
   756  	case ssa.OpLOONG64LoweredAtomicCas32, ssa.OpLOONG64LoweredAtomicCas64:
   757  		// MOVV $0, Rout
   758  		// DBAR 0x14
   759  		// LL	(Rarg0), Rtmp
   760  		// BNE	Rtmp, Rarg1, 4(PC)
   761  		// MOVV Rarg2, Rout
   762  		// SC	Rout, (Rarg0)
   763  		// BEQ	Rout, -4(PC)
   764  		// DBAR 0x12
   765  		ll := loong64.ALLV
   766  		sc := loong64.ASCV
   767  		if v.Op == ssa.OpLOONG64LoweredAtomicCas32 {
   768  			ll = loong64.ALL
   769  			sc = loong64.ASC
   770  		}
   771  
   772  		p := s.Prog(loong64.AMOVV)
   773  		p.From.Type = obj.TYPE_REG
   774  		p.From.Reg = loong64.REGZERO
   775  		p.To.Type = obj.TYPE_REG
   776  		p.To.Reg = v.Reg0()
   777  
   778  		p1 := s.Prog(loong64.ADBAR)
   779  		p1.From.Type = obj.TYPE_CONST
   780  		p1.From.Offset = 0x14
   781  
   782  		p2 := s.Prog(ll)
   783  		p2.From.Type = obj.TYPE_MEM
   784  		p2.From.Reg = v.Args[0].Reg()
   785  		p2.To.Type = obj.TYPE_REG
   786  		p2.To.Reg = loong64.REGTMP
   787  
   788  		p3 := s.Prog(loong64.ABNE)
   789  		p3.From.Type = obj.TYPE_REG
   790  		p3.From.Reg = v.Args[1].Reg()
   791  		p3.Reg = loong64.REGTMP
   792  		p3.To.Type = obj.TYPE_BRANCH
   793  
   794  		p4 := s.Prog(loong64.AMOVV)
   795  		p4.From.Type = obj.TYPE_REG
   796  		p4.From.Reg = v.Args[2].Reg()
   797  		p4.To.Type = obj.TYPE_REG
   798  		p4.To.Reg = v.Reg0()
   799  
   800  		p5 := s.Prog(sc)
   801  		p5.From.Type = obj.TYPE_REG
   802  		p5.From.Reg = v.Reg0()
   803  		p5.To.Type = obj.TYPE_MEM
   804  		p5.To.Reg = v.Args[0].Reg()
   805  
   806  		p6 := s.Prog(loong64.ABEQ)
   807  		p6.From.Type = obj.TYPE_REG
   808  		p6.From.Reg = v.Reg0()
   809  		p6.To.Type = obj.TYPE_BRANCH
   810  		p6.To.SetTarget(p2)
   811  
   812  		p7 := s.Prog(loong64.ADBAR)
   813  		p7.From.Type = obj.TYPE_CONST
   814  		p7.From.Offset = 0x12
   815  		p3.To.SetTarget(p7)
   816  
   817  	case ssa.OpLOONG64LoweredAtomicAnd32,
   818  		ssa.OpLOONG64LoweredAtomicOr32:
   819  		// AM{AND,OR}DBx  Rarg1, (Rarg0), RegZero
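         		// Only the memory effect is needed, so the old value is discarded
         		// by writing it to REGZERO.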
   820  		p := s.Prog(v.Op.Asm())
   821  		p.From.Type = obj.TYPE_REG
   822  		p.From.Reg = v.Args[1].Reg()
   823  		p.To.Type = obj.TYPE_MEM
   824  		p.To.Reg = v.Args[0].Reg()
   825  		p.RegTo2 = loong64.REGZERO
   826  
   827  	case ssa.OpLOONG64LoweredAtomicAnd32value,
   828  		ssa.OpLOONG64LoweredAtomicAnd64value,
   829  		ssa.OpLOONG64LoweredAtomicOr64value,
   830  		ssa.OpLOONG64LoweredAtomicOr32value:
   831  		// AM{AND,OR}DBx  Rarg1, (Rarg0), Rout
   832  		p := s.Prog(v.Op.Asm())
   833  		p.From.Type = obj.TYPE_REG
   834  		p.From.Reg = v.Args[1].Reg()
   835  		p.To.Type = obj.TYPE_MEM
   836  		p.To.Reg = v.Args[0].Reg()
   837  		p.RegTo2 = v.Reg0()
   838  
   839  	case ssa.OpLOONG64LoweredAtomicCas64Variant, ssa.OpLOONG64LoweredAtomicCas32Variant:
   840  		// MOVV         $0, Rout
   841  		// MOVV         Rarg1, Rtmp
   842  		// AMCASDBx     Rarg2, (Rarg0), Rtmp
   843  		// BNE          Rarg1, Rtmp, 2(PC)
   844  		// MOVV         $1, Rout
   845  		// NOP
   846  
   847  		amcasx := loong64.AAMCASDBV
   848  		if v.Op == ssa.OpLOONG64LoweredAtomicCas32Variant {
   849  			amcasx = loong64.AAMCASDBW
   850  		}
   851  
   852  		p := s.Prog(loong64.AMOVV)
   853  		p.From.Type = obj.TYPE_REG
   854  		p.From.Reg = loong64.REGZERO
   855  		p.To.Type = obj.TYPE_REG
   856  		p.To.Reg = v.Reg0()
   857  
   858  		p1 := s.Prog(loong64.AMOVV)
   859  		p1.From.Type = obj.TYPE_REG
   860  		p1.From.Reg = v.Args[1].Reg()
   861  		p1.To.Type = obj.TYPE_REG
   862  		p1.To.Reg = loong64.REGTMP
   863  
   864  		p2 := s.Prog(amcasx)
   865  		p2.From.Type = obj.TYPE_REG
   866  		p2.From.Reg = v.Args[2].Reg()
   867  		p2.To.Type = obj.TYPE_MEM
   868  		p2.To.Reg = v.Args[0].Reg()
   869  		p2.RegTo2 = loong64.REGTMP
   870  
   871  		p3 := s.Prog(loong64.ABNE)
   872  		p3.From.Type = obj.TYPE_REG
   873  		p3.From.Reg = v.Args[1].Reg()
   874  		p3.Reg = loong64.REGTMP
   875  		p3.To.Type = obj.TYPE_BRANCH
   876  
   877  		p4 := s.Prog(loong64.AMOVV)
   878  		p4.From.Type = obj.TYPE_CONST
   879  		p4.From.Offset = 0x1
   880  		p4.To.Type = obj.TYPE_REG
   881  		p4.To.Reg = v.Reg0()
   882  
   883  		p5 := s.Prog(obj.ANOP)
   884  		p3.To.SetTarget(p5)
   885  
   886  	case ssa.OpLOONG64LoweredNilCheck:
   887  		// Issue a load which will fault if arg is nil.
   888  		p := s.Prog(loong64.AMOVB)
   889  		p.From.Type = obj.TYPE_MEM
   890  		p.From.Reg = v.Args[0].Reg()
   891  		ssagen.AddAux(&p.From, v)
   892  		p.To.Type = obj.TYPE_REG
   893  		p.To.Reg = loong64.REGTMP
   894  		if logopt.Enabled() {
   895  			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
   896  		}
   897  		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
   898  			base.WarnfAt(v.Pos, "generated nil check")
   899  		}
   900  	case ssa.OpLOONG64FPFlagTrue,
   901  		ssa.OpLOONG64FPFlagFalse:
   902  		// MOVV	$0, r
   903  		// BFPF	2(PC)
   904  		// MOVV	$1, r
   905  		branch := loong64.ABFPF
   906  		if v.Op == ssa.OpLOONG64FPFlagFalse {
   907  			branch = loong64.ABFPT
   908  		}
   909  		p := s.Prog(loong64.AMOVV)
   910  		p.From.Type = obj.TYPE_REG
   911  		p.From.Reg = loong64.REGZERO
   912  		p.To.Type = obj.TYPE_REG
   913  		p.To.Reg = v.Reg()
   914  		p2 := s.Prog(branch)
   915  		p2.To.Type = obj.TYPE_BRANCH
   916  		p3 := s.Prog(loong64.AMOVV)
   917  		p3.From.Type = obj.TYPE_CONST
   918  		p3.From.Offset = 1
   919  		p3.To.Type = obj.TYPE_REG
   920  		p3.To.Reg = v.Reg()
   921  		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
   922  		p2.To.SetTarget(p4)
   923  	case ssa.OpLOONG64LoweredGetClosurePtr:
   924  		// Closure pointer is R22 (loong64.REGCTXT).
   925  		ssagen.CheckLoweredGetClosurePtr(v)
   926  	case ssa.OpLOONG64LoweredGetCallerSP:
   927  		// caller's SP is FixedFrameSize below the address of the first arg
   928  		p := s.Prog(loong64.AMOVV)
   929  		p.From.Type = obj.TYPE_ADDR
   930  		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
   931  		p.From.Name = obj.NAME_PARAM
   932  		p.To.Type = obj.TYPE_REG
   933  		p.To.Reg = v.Reg()
   934  	case ssa.OpLOONG64LoweredGetCallerPC:
   935  		p := s.Prog(obj.AGETCALLERPC)
   936  		p.To.Type = obj.TYPE_REG
   937  		p.To.Reg = v.Reg()
   938  	case ssa.OpLOONG64MASKEQZ, ssa.OpLOONG64MASKNEZ:
   939  		p := s.Prog(v.Op.Asm())
   940  		p.From.Type = obj.TYPE_REG
   941  		p.From.Reg = v.Args[1].Reg()
   942  		p.Reg = v.Args[0].Reg()
   943  		p.To.Type = obj.TYPE_REG
   944  		p.To.Reg = v.Reg()
   945  	case ssa.OpClobber, ssa.OpClobberReg:
   946  		// TODO: implement for clobberdead experiment. Nop is ok for now.
   947  	default:
   948  		v.Fatalf("genValue not implemented: %s", v.LongString())
   949  	}
   950  }
   951  
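         // blockJump gives, for each conditional block kind, the branch instruction
         // that branches on its condition and the inverted branch used when the
         // condition is tested in the negated sense.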
   952  var blockJump = map[ssa.BlockKind]struct {
   953  	asm, invasm obj.As
   954  }{
   955  	ssa.BlockLOONG64EQ:   {loong64.ABEQ, loong64.ABNE},
   956  	ssa.BlockLOONG64NE:   {loong64.ABNE, loong64.ABEQ},
   957  	ssa.BlockLOONG64LTZ:  {loong64.ABLTZ, loong64.ABGEZ},
   958  	ssa.BlockLOONG64GEZ:  {loong64.ABGEZ, loong64.ABLTZ},
   959  	ssa.BlockLOONG64LEZ:  {loong64.ABLEZ, loong64.ABGTZ},
   960  	ssa.BlockLOONG64GTZ:  {loong64.ABGTZ, loong64.ABLEZ},
   961  	ssa.BlockLOONG64FPT:  {loong64.ABFPT, loong64.ABFPF},
   962  	ssa.BlockLOONG64FPF:  {loong64.ABFPF, loong64.ABFPT},
   963  	ssa.BlockLOONG64BEQ:  {loong64.ABEQ, loong64.ABNE},
   964  	ssa.BlockLOONG64BNE:  {loong64.ABNE, loong64.ABEQ},
   965  	ssa.BlockLOONG64BGE:  {loong64.ABGE, loong64.ABLT},
   966  	ssa.BlockLOONG64BLT:  {loong64.ABLT, loong64.ABGE},
   967  	ssa.BlockLOONG64BLTU: {loong64.ABLTU, loong64.ABGEU},
   968  	ssa.BlockLOONG64BGEU: {loong64.ABGEU, loong64.ABLTU},
   969  }
   970  
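         // ssaGenBlock emits the control-flow instructions that end block b.
         // next is the block laid out immediately after b, so a jump to it can
         // be omitted.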
   971  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   972  	switch b.Kind {
   973  	case ssa.BlockPlain:
   974  		if b.Succs[0].Block() != next {
   975  			p := s.Prog(obj.AJMP)
   976  			p.To.Type = obj.TYPE_BRANCH
   977  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   978  		}
   979  	case ssa.BlockDefer:
   980  		// defer returns in R19:
   981  		// 0 if we should continue executing
   982  		// 1 if we should jump to deferreturn call
   983  		p := s.Prog(loong64.ABNE)
   984  		p.From.Type = obj.TYPE_REG
   985  		p.From.Reg = loong64.REGZERO
   986  		p.Reg = loong64.REG_R19
   987  		p.To.Type = obj.TYPE_BRANCH
   988  		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
   989  		if b.Succs[0].Block() != next {
   990  			p := s.Prog(obj.AJMP)
   991  			p.To.Type = obj.TYPE_BRANCH
   992  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   993  		}
   994  	case ssa.BlockExit, ssa.BlockRetJmp:
   995  	case ssa.BlockRet:
   996  		s.Prog(obj.ARET)
   997  	case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
   998  		ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
   999  		ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
  1000  		ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
  1001  		ssa.BlockLOONG64BLT, ssa.BlockLOONG64BGE,
  1002  		ssa.BlockLOONG64BLTU, ssa.BlockLOONG64BGEU,
  1003  		ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
  1004  		jmp := blockJump[b.Kind]
  1005  		var p *obj.Prog
  1006  		switch next {
  1007  		case b.Succs[0].Block():
  1008  			p = s.Br(jmp.invasm, b.Succs[1].Block())
  1009  		case b.Succs[1].Block():
  1010  			p = s.Br(jmp.asm, b.Succs[0].Block())
  1011  		default:
  1012  			if b.Likely != ssa.BranchUnlikely {
  1013  				p = s.Br(jmp.asm, b.Succs[0].Block())
  1014  				s.Br(obj.AJMP, b.Succs[1].Block())
  1015  			} else {
  1016  				p = s.Br(jmp.invasm, b.Succs[1].Block())
  1017  				s.Br(obj.AJMP, b.Succs[0].Block())
  1018  			}
  1019  		}
  1020  		switch b.Kind {
  1021  		case ssa.BlockLOONG64BEQ, ssa.BlockLOONG64BNE,
  1022  			ssa.BlockLOONG64BGE, ssa.BlockLOONG64BLT,
  1023  			ssa.BlockLOONG64BGEU, ssa.BlockLOONG64BLTU:
  1024  			p.From.Type = obj.TYPE_REG
  1025  			p.From.Reg = b.Controls[0].Reg()
  1026  			p.Reg = b.Controls[1].Reg()
  1027  		case ssa.BlockLOONG64EQ, ssa.BlockLOONG64NE,
  1028  			ssa.BlockLOONG64LTZ, ssa.BlockLOONG64GEZ,
  1029  			ssa.BlockLOONG64LEZ, ssa.BlockLOONG64GTZ,
  1030  			ssa.BlockLOONG64FPT, ssa.BlockLOONG64FPF:
  1031  			if !b.Controls[0].Type.IsFlags() {
  1032  				p.From.Type = obj.TYPE_REG
  1033  				p.From.Reg = b.Controls[0].Reg()
  1034  			}
  1035  		}
  1036  	default:
  1037  		b.Fatalf("branch not implemented: %s", b.LongString())
  1038  	}
  1039  }
  1040  
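         // loadRegResult generates a load of the result value n+off from its stack
         // slot into register reg.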
  1041  func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
  1042  	p := s.Prog(loadByType(t, reg))
  1043  	p.From.Type = obj.TYPE_MEM
  1044  	p.From.Name = obj.NAME_AUTO
  1045  	p.From.Sym = n.Linksym()
  1046  	p.From.Offset = n.FrameOffset() + off
  1047  	p.To.Type = obj.TYPE_REG
  1048  	p.To.Reg = reg
  1049  	return p
  1050  }
  1051  
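         // spillArgReg generates a store of register reg to the parameter slot
         // n+off, appended after p. The spill is marked as not a statement.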
  1052  func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
  1053  	p = pp.Append(p, storeByType(t, reg), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
  1054  	p.To.Name = obj.NAME_PARAM
  1055  	p.To.Sym = n.Linksym()
  1056  	p.Pos = p.Pos.WithNotStmt()
  1057  	return p
  1058  }
  1059  
