Source file src/cmd/compile/internal/ssa/value.go

     1  // Copyright 2015 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package ssa
     6  
     7  import (
     8  	"cmd/compile/internal/ir"
     9  	"cmd/compile/internal/types"
    10  	"cmd/internal/src"
    11  	"fmt"
    12  	"internal/buildcfg"
    13  	"math"
    14  	"sort"
    15  	"strings"
    16  )
    17  
    18  // A Value represents a value in the SSA representation of the program.
    19  // The ID and Type fields must not be modified. The remainder may be modified
    20  // as long as the change preserves the value of the Value (e.g. changing a (mul 2 x) to an (add x x)).
    21  type Value struct {
    22  	// A unique identifier for the value. For performance, we allocate these IDs
    23  	// densely starting at 1. There may be occasional holes, though.
    24  	ID ID
    25  
    26  	// The operation that computes this value. See op.go.
    27  	Op Op
    28  
    29  	// The type of this value. Normally this will be a Go type, but there
    30  	// are a few other pseudo-types, see ../types/type.go.
    31  	Type *types.Type
    32  
    33  	// Auxiliary info for this value. The type of this information depends on the opcode and type.
    34  	// AuxInt is used for integer values; Aux is used for other values.
    35  	// Floats are stored in AuxInt using math.Float64bits(f).
    36  	// Unused portions of AuxInt are filled by sign-extending the used portion,
    37  	// even if the represented value is unsigned.
    38  	// Users of AuxInt which interpret AuxInt as unsigned (e.g. shifts) must be careful.
    39  	// Use Value.AuxUnsigned to get the zero-extended value of AuxInt.
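        	// For example, an OpConst8 representing the byte 0xFF stores AuxInt = -1; AuxUnsigned returns 255.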
    40  	AuxInt int64
    41  	Aux    Aux
    42  
    43  	// Arguments of this value
    44  	Args []*Value
    45  
    46  	// Containing basic block
    47  	Block *Block
    48  
    49  	// Source position
    50  	Pos src.XPos
    51  
    52  	// Use count. Each appearance in Value.Args and Block.Controls counts once.
    53  	Uses int32
    54  
    55  	// wasm: Value stays on the WebAssembly stack. This value will not get a "register" (WebAssembly variable)
    56  	// nor a slot on the Go stack, and the generation of this value is delayed to its use time.
    57  	OnWasmStack bool
    58  
    59  	// Is this value in the per-function constant cache? If so, remove from cache before changing it or recycling it.
    60  	InCache bool
    61  
    62  	// Storage for the first three args
    63  	argstorage [3]*Value
    64  }
    65  
    66  // Examples:
    67  // Opcode          aux   args
    68  //  OpAdd          nil      2
    69  //  OpConst     string      0    string constant
    70  //  OpConst      int64      0    int64 constant
    71  //  OpAddcq      int64      1    amd64 op: v = arg[0] + constant
    72  
    73  // String returns the short form print of v: just v#.
    74  func (v *Value) String() string {
    75  	if v == nil {
    76  		return "nil" // should never happen, but not panicking helps with debugging
    77  	}
    78  	return fmt.Sprintf("v%d", v.ID)
    79  }
    80  
    81  func (v *Value) AuxInt8() int8 {
    82  	if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 {
    83  		v.Fatalf("op %s doesn't have an int8 aux field", v.Op)
    84  	}
    85  	return int8(v.AuxInt)
    86  }
    87  
    88  func (v *Value) AuxUInt8() uint8 {
    89  	if opcodeTable[v.Op].auxType != auxUInt8 {
    90  		v.Fatalf("op %s doesn't have a uint8 aux field", v.Op)
    91  	}
    92  	return uint8(v.AuxInt)
    93  }
    94  
    95  func (v *Value) AuxInt16() int16 {
    96  	if opcodeTable[v.Op].auxType != auxInt16 {
    97  		v.Fatalf("op %s doesn't have an int16 aux field", v.Op)
    98  	}
    99  	return int16(v.AuxInt)
   100  }
   101  
   102  func (v *Value) AuxInt32() int32 {
   103  	if opcodeTable[v.Op].auxType != auxInt32 {
   104  		v.Fatalf("op %s doesn't have an int32 aux field", v.Op)
   105  	}
   106  	return int32(v.AuxInt)
   107  }
   108  
   109  // AuxUnsigned returns v.AuxInt as an unsigned value for OpConst*.
   110  // v.AuxInt is always sign-extended to 64 bits, even if the
   111  // represented value is unsigned. This undoes that sign extension.
   112  func (v *Value) AuxUnsigned() uint64 {
   113  	c := v.AuxInt
   114  	switch v.Op {
   115  	case OpConst64:
   116  		return uint64(c)
   117  	case OpConst32:
   118  		return uint64(uint32(c))
   119  	case OpConst16:
   120  		return uint64(uint16(c))
   121  	case OpConst8:
   122  		return uint64(uint8(c))
   123  	}
   124  	v.Fatalf("op %s isn't OpConst*", v.Op)
   125  	return 0
   126  }
   127  
   128  func (v *Value) AuxFloat() float64 {
   129  	if opcodeTable[v.Op].auxType != auxFloat32 && opcodeTable[v.Op].auxType != auxFloat64 {
   130  		v.Fatalf("op %s doesn't have a float aux field", v.Op)
   131  	}
   132  	return math.Float64frombits(uint64(v.AuxInt))
   133  }
   134  func (v *Value) AuxValAndOff() ValAndOff {
   135  	if opcodeTable[v.Op].auxType != auxSymValAndOff {
   136  		v.Fatalf("op %s doesn't have a ValAndOff aux field", v.Op)
   137  	}
   138  	return ValAndOff(v.AuxInt)
   139  }
   140  
   141  func (v *Value) AuxArm64BitField() arm64BitField {
   142  	if opcodeTable[v.Op].auxType != auxARM64BitField {
   143  		v.Fatalf("op %s doesn't have an ARM64BitField aux field", v.Op)
   144  	}
   145  	return arm64BitField(v.AuxInt)
   146  }
   147  
   148  func (v *Value) AuxArm64ConditionalParams() arm64ConditionalParams {
   149  	if opcodeTable[v.Op].auxType != auxARM64ConditionalParams {
   150  		v.Fatalf("op %s doesn't have an ARM64ConditionalParams aux field", v.Op)
   151  	}
   152  	return auxIntToArm64ConditionalParams(v.AuxInt)
   153  }
   154  
   155  // LongString returns the long form print of v: v# = opcode <type> [aux] args [: reg] (names)
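        // For example (illustrative): v7 = ADDQconst <int> [1] v6 : AX (i)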
   156  func (v *Value) LongString() string {
   157  	if v == nil {
   158  		return "<NIL VALUE>"
   159  	}
   160  	s := fmt.Sprintf("v%d = %s", v.ID, v.Op)
   161  	s += " <" + v.Type.String() + ">"
   162  	s += v.auxString()
   163  	for _, a := range v.Args {
   164  		s += fmt.Sprintf(" %v", a)
   165  	}
   166  	if v.Block == nil {
   167  		return s
   168  	}
   169  	r := v.Block.Func.RegAlloc
   170  	if int(v.ID) < len(r) && r[v.ID] != nil {
   171  		s += " : " + r[v.ID].String()
   172  	}
   173  	if reg := v.Block.Func.tempRegs[v.ID]; reg != nil {
   174  		s += " tmp=" + reg.String()
   175  	}
   176  	var names []string
   177  	for name, values := range v.Block.Func.NamedValues {
   178  		for _, value := range values {
   179  			if value == v {
   180  				names = append(names, name.String())
   181  				break // drop duplicates.
   182  			}
   183  		}
   184  	}
   185  	if len(names) != 0 {
   186  		sort.Strings(names) // Otherwise a source of variation in debugging output.
   187  		s += " (" + strings.Join(names, ", ") + ")"
   188  	}
   189  	return s
   190  }
   191  
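        // auxString returns the aux portion of v's long form print, formatted according to the op's aux type.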
   192  func (v *Value) auxString() string {
   193  	switch opcodeTable[v.Op].auxType {
   194  	case auxBool:
   195  		if v.AuxInt == 0 {
   196  			return " [false]"
   197  		} else {
   198  			return " [true]"
   199  		}
   200  	case auxInt8:
   201  		return fmt.Sprintf(" [%d]", v.AuxInt8())
   202  	case auxInt16:
   203  		return fmt.Sprintf(" [%d]", v.AuxInt16())
   204  	case auxInt32:
   205  		return fmt.Sprintf(" [%d]", v.AuxInt32())
   206  	case auxInt64, auxInt128:
   207  		return fmt.Sprintf(" [%d]", v.AuxInt)
   208  	case auxUInt8:
   209  		return fmt.Sprintf(" [%d]", v.AuxUInt8())
   210  	case auxARM64BitField:
   211  		lsb := v.AuxArm64BitField().lsb()
   212  		width := v.AuxArm64BitField().width()
   213  		return fmt.Sprintf(" [lsb=%d,width=%d]", lsb, width)
   214  	case auxARM64ConditionalParams:
   215  		params := v.AuxArm64ConditionalParams()
   216  		cond := params.Cond()
   217  		nzcv := params.Nzcv()
   218  		imm, ok := params.ConstValue()
   219  		if ok {
   220  			return fmt.Sprintf(" [cond=%s,nzcv=%d,imm=%d]", cond, nzcv, imm)
   221  		}
   222  		return fmt.Sprintf(" [cond=%s,nzcv=%d]", cond, nzcv)
   223  	case auxFloat32, auxFloat64:
   224  		return fmt.Sprintf(" [%g]", v.AuxFloat())
   225  	case auxString:
   226  		return fmt.Sprintf(" {%q}", v.Aux)
   227  	case auxSym, auxCall, auxTyp:
   228  		if v.Aux != nil {
   229  			return fmt.Sprintf(" {%v}", v.Aux)
   230  		}
   231  		return ""
   232  	case auxSymOff, auxCallOff, auxTypSize, auxNameOffsetInt8:
   233  		s := ""
   234  		if v.Aux != nil {
   235  			s = fmt.Sprintf(" {%v}", v.Aux)
   236  		}
   237  		if v.AuxInt != 0 || opcodeTable[v.Op].auxType == auxNameOffsetInt8 {
   238  			s += fmt.Sprintf(" [%v]", v.AuxInt)
   239  		}
   240  		return s
   241  	case auxSymValAndOff:
   242  		s := ""
   243  		if v.Aux != nil {
   244  			s = fmt.Sprintf(" {%v}", v.Aux)
   245  		}
   246  		return s + fmt.Sprintf(" [%s]", v.AuxValAndOff())
   247  	case auxCCop:
   248  		return fmt.Sprintf(" [%s]", Op(v.AuxInt))
   249  	case auxS390XCCMask, auxS390XRotateParams:
   250  		return fmt.Sprintf(" {%v}", v.Aux)
   251  	case auxFlagConstant:
   252  		return fmt.Sprintf(" [%s]", flagConstant(v.AuxInt))
   253  	case auxNone:
   254  		return ""
   255  	default:
   256  		// If you see this, add a case above instead.
   257  		return fmt.Sprintf("[auxtype=%d AuxInt=%d Aux=%v]", opcodeTable[v.Op].auxType, v.AuxInt, v.Aux)
   258  	}
   259  }
   260  
   261  // If/when midstack inlining is enabled (-l=4), the compiler gets both larger and slower.
   262  // Not-inlining this method is a help (*Value.reset and *Block.NewValue0 are similar).
   263  //
   264  //go:noinline
   265  func (v *Value) AddArg(w *Value) {
   266  	if v.Args == nil {
   267  		v.resetArgs() // use argstorage
   268  	}
   269  	v.Args = append(v.Args, w)
   270  	w.Uses++
   271  }
   272  
   273  //go:noinline
   274  func (v *Value) AddArg2(w1, w2 *Value) {
   275  	if v.Args == nil {
   276  		v.resetArgs() // use argstorage
   277  	}
   278  	v.Args = append(v.Args, w1, w2)
   279  	w1.Uses++
   280  	w2.Uses++
   281  }
   282  
   283  //go:noinline
   284  func (v *Value) AddArg3(w1, w2, w3 *Value) {
   285  	if v.Args == nil {
   286  		v.resetArgs() // use argstorage
   287  	}
   288  	v.Args = append(v.Args, w1, w2, w3)
   289  	w1.Uses++
   290  	w2.Uses++
   291  	w3.Uses++
   292  }
   293  
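        // AddArg4 and the larger variants omit the argstorage check done by
        // AddArg through AddArg3: argstorage holds only three args, so it could
        // not back the append in any case.
        //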
   294  //go:noinline
   295  func (v *Value) AddArg4(w1, w2, w3, w4 *Value) {
   296  	v.Args = append(v.Args, w1, w2, w3, w4)
   297  	w1.Uses++
   298  	w2.Uses++
   299  	w3.Uses++
   300  	w4.Uses++
   301  }
   302  
   303  //go:noinline
   304  func (v *Value) AddArg5(w1, w2, w3, w4, w5 *Value) {
   305  	v.Args = append(v.Args, w1, w2, w3, w4, w5)
   306  	w1.Uses++
   307  	w2.Uses++
   308  	w3.Uses++
   309  	w4.Uses++
   310  	w5.Uses++
   311  }
   312  
   313  //go:noinline
   314  func (v *Value) AddArg6(w1, w2, w3, w4, w5, w6 *Value) {
   315  	v.Args = append(v.Args, w1, w2, w3, w4, w5, w6)
   316  	w1.Uses++
   317  	w2.Uses++
   318  	w3.Uses++
   319  	w4.Uses++
   320  	w5.Uses++
   321  	w6.Uses++
   322  }
   323  
   324  func (v *Value) AddArgs(a ...*Value) {
   325  	if v.Args == nil {
   326  		v.resetArgs() // use argstorage
   327  	}
   328  	v.Args = append(v.Args, a...)
   329  	for _, x := range a {
   330  		x.Uses++
   331  	}
   332  }
   333  func (v *Value) SetArg(i int, w *Value) {
   334  	v.Args[i].Uses--
   335  	v.Args[i] = w
   336  	w.Uses++
   337  }
   338  func (v *Value) SetArgs1(a *Value) {
   339  	v.resetArgs()
   340  	v.AddArg(a)
   341  }
   342  func (v *Value) SetArgs2(a, b *Value) {
   343  	v.resetArgs()
   344  	v.AddArg(a)
   345  	v.AddArg(b)
   346  }
   347  func (v *Value) SetArgs3(a, b, c *Value) {
   348  	v.resetArgs()
   349  	v.AddArg(a)
   350  	v.AddArg(b)
   351  	v.AddArg(c)
   352  }
   353  func (v *Value) SetArgs4(a, b, c, d *Value) {
   354  	v.resetArgs()
   355  	v.AddArg(a)
   356  	v.AddArg(b)
   357  	v.AddArg(c)
   358  	v.AddArg(d)
   359  }
   360  
   361  func (v *Value) resetArgs() {
   362  	for _, a := range v.Args {
   363  		a.Uses--
   364  	}
   365  	v.argstorage[0] = nil
   366  	v.argstorage[1] = nil
   367  	v.argstorage[2] = nil
   368  	v.Args = v.argstorage[:0]
   369  }
   370  
   371  // reset is called from most rewrite rules.
   372  // Allowing it to be inlined increases the size
   373  // of cmd/compile by almost 10%, and slows it down.
   374  //
   375  //go:noinline
   376  func (v *Value) reset(op Op) {
   377  	if v.InCache {
   378  		v.Block.Func.unCache(v)
   379  	}
   380  	v.Op = op
   381  	v.resetArgs()
   382  	v.AuxInt = 0
   383  	v.Aux = nil
   384  }
   385  
   386  // invalidateRecursively marks a value as invalid (unused)
   387  // and after decrementing reference counts on its Args,
   388  // also recursively invalidates any of those whose use
   389  // count goes to zero.  It returns whether any of the
   390  // invalidated values was marked with IsStmt.
   391  //
   392  // BEWARE of doing this *before* you've applied intended
   393  // updates to SSA.
   394  func (v *Value) invalidateRecursively() bool {
   395  	lostStmt := v.Pos.IsStmt() == src.PosIsStmt
   396  	if v.InCache {
   397  		v.Block.Func.unCache(v)
   398  	}
   399  	v.Op = OpInvalid
   400  
   401  	for _, a := range v.Args {
   402  		a.Uses--
   403  		if a.Uses == 0 {
   404  			lost := a.invalidateRecursively()
   405  			lostStmt = lost || lostStmt
   406  		}
   407  	}
   408  
   409  	v.argstorage[0] = nil
   410  	v.argstorage[1] = nil
   411  	v.argstorage[2] = nil
   412  	v.Args = v.argstorage[:0]
   413  
   414  	v.AuxInt = 0
   415  	v.Aux = nil
   416  	return lostStmt
   417  }
   418  
   419  // copyOf is called from rewrite rules.
   420  // It modifies v to be (Copy a).
   421  //
   422  //go:noinline
   423  func (v *Value) copyOf(a *Value) {
   424  	if v == a {
   425  		return
   426  	}
   427  	if v.InCache {
   428  		v.Block.Func.unCache(v)
   429  	}
   430  	v.Op = OpCopy
   431  	v.resetArgs()
   432  	v.AddArg(a)
   433  	v.AuxInt = 0
   434  	v.Aux = nil
   435  	v.Type = a.Type
   436  }
   437  
   438  // copyInto makes a new value identical to v and adds it to the end of b.
   439  // Unlike copyIntoWithXPos, this does not check whether v.Pos is a statement.
   440  func (v *Value) copyInto(b *Block) *Value {
   441  	c := b.NewValue0(v.Pos.WithNotStmt(), v.Op, v.Type) // Lose the position; keeping it would cause line number churn.
   442  	c.Aux = v.Aux
   443  	c.AuxInt = v.AuxInt
   444  	c.AddArgs(v.Args...)
   445  	for _, a := range v.Args {
   446  		if a.Type.IsMemory() {
   447  			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
   448  		}
   449  	}
   450  	return c
   451  }
   452  
   453  // copyIntoWithXPos makes a new value identical to v and adds it to the end of b.
   454  // The supplied position is used as the position of the new value.
   455  // Because this is used for rematerialization, it checks for the case where the
   456  // rematerialized value v carries a statement mark but the supplied position
   457  // (of the instruction using the rematerialized value) does not, and it
   458  // preserves that mark when the two positions share the same file and line.
   459  func (v *Value) copyIntoWithXPos(b *Block, pos src.XPos) *Value {
   460  	if v.Pos.IsStmt() == src.PosIsStmt && pos.IsStmt() != src.PosIsStmt && v.Pos.SameFileAndLine(pos) {
   461  		pos = pos.WithIsStmt()
   462  	}
   463  	c := b.NewValue0(pos, v.Op, v.Type)
   464  	c.Aux = v.Aux
   465  	c.AuxInt = v.AuxInt
   466  	c.AddArgs(v.Args...)
   467  	for _, a := range v.Args {
   468  		if a.Type.IsMemory() {
   469  			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
   470  		}
   471  	}
   472  	return c
   473  }
   474  
   475  func (v *Value) Logf(msg string, args ...any) { v.Block.Logf(msg, args...) }
   476  func (v *Value) Log() bool                    { return v.Block.Log() }
   477  func (v *Value) Fatalf(msg string, args ...any) {
   478  	v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
   479  }
   480  
   481  // isGenericIntConst reports whether v is a generic integer constant.
   482  func (v *Value) isGenericIntConst() bool {
   483  	return v != nil && (v.Op == OpConst64 || v.Op == OpConst32 || v.Op == OpConst16 || v.Op == OpConst8)
   484  }
   485  
   486  // ResultReg returns the result register assigned to v, in cmd/internal/obj/$ARCH numbering.
   487  // It is similar to Reg and Reg0, except that it is usable interchangeably for all Value Ops.
   488  // If you know v.Op, using Reg or Reg0 (as appropriate) will be more efficient.
   489  func (v *Value) ResultReg() int16 {
   490  	reg := v.Block.Func.RegAlloc[v.ID]
   491  	if reg == nil {
   492  		v.Fatalf("nil reg for value: %s\n%s\n", v.LongString(), v.Block.Func)
   493  	}
   494  	if pair, ok := reg.(LocPair); ok {
   495  		reg = pair[0]
   496  	}
   497  	if reg == nil {
   498  		v.Fatalf("nil reg0 for value: %s\n%s\n", v.LongString(), v.Block.Func)
   499  	}
   500  	return reg.(*Register).objNum
   501  }
   502  
   503  // Reg returns the register assigned to v, in cmd/internal/obj/$ARCH numbering.
   504  func (v *Value) Reg() int16 {
   505  	reg := v.Block.Func.RegAlloc[v.ID]
   506  	if reg == nil {
   507  		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
   508  	}
   509  	return reg.(*Register).objNum
   510  }
   511  
   512  // Reg0 returns the register assigned to the first output of v, in cmd/internal/obj/$ARCH numbering.
   513  func (v *Value) Reg0() int16 {
   514  	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[0]
   515  	if reg == nil {
   516  		v.Fatalf("nil first register for value: %s\n%s\n", v.LongString(), v.Block.Func)
   517  	}
   518  	return reg.(*Register).objNum
   519  }
   520  
   521  // Reg1 returns the register assigned to the second output of v, in cmd/internal/obj/$ARCH numbering.
   522  func (v *Value) Reg1() int16 {
   523  	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[1]
   524  	if reg == nil {
   525  		v.Fatalf("nil second register for value: %s\n%s\n", v.LongString(), v.Block.Func)
   526  	}
   527  	return reg.(*Register).objNum
   528  }
   529  
   530  // RegTmp returns the temporary register assigned to v, in cmd/internal/obj/$ARCH numbering.
   531  func (v *Value) RegTmp() int16 {
   532  	reg := v.Block.Func.tempRegs[v.ID]
   533  	if reg == nil {
   534  		v.Fatalf("nil tmp register for value: %s\n%s\n", v.LongString(), v.Block.Func)
   535  	}
   536  	return reg.objNum
   537  }
   538  
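        // RegName returns the name of the register assigned to v.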
   539  func (v *Value) RegName() string {
   540  	reg := v.Block.Func.RegAlloc[v.ID]
   541  	if reg == nil {
   542  		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
   543  	}
   544  	return reg.(*Register).name
   545  }
   546  
   547  // MemoryArg returns the memory argument for the Value.
   548  // The returned value, if non-nil, will be memory-typed (or a tuple with a memory-typed second part).
   549  // Otherwise, nil is returned.
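        // For example, the last argument of an OpStore is its memory state and is returned; an OpConst64 has no memory argument, so nil is returned.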
   550  func (v *Value) MemoryArg() *Value {
   551  	if v.Op == OpPhi {
   552  		v.Fatalf("MemoryArg on Phi")
   553  	}
   554  	na := len(v.Args)
   555  	if na == 0 {
   556  		return nil
   557  	}
   558  	if m := v.Args[na-1]; m.Type.IsMemory() {
   559  		return m
   560  	}
   561  	return nil
   562  }
   563  
   564  // LackingPos indicates whether v is a value that is unlikely to have a correct
   565  // position assigned to it.  Ignoring such values leads to more user-friendly positions
   566  // assigned to nearby values and the blocks containing them.
   567  func (v *Value) LackingPos() bool {
   568  	// The exact definition of LackingPos is somewhat heuristic and may change
   569  	// in the future, for example if some of these operations are generated more carefully
   570  	// with respect to their source position.
   571  	return v.Op == OpVarDef || v.Op == OpVarLive || v.Op == OpPhi ||
   572  		(v.Op == OpFwdRef || v.Op == OpCopy) && v.Type == types.TypeMem
   573  }
   574  
   575  // removeable reports whether the value v can be removed from the SSA graph entirely
   576  // if its use count drops to 0.
   577  func (v *Value) removeable() bool {
   578  	if v.Type.IsVoid() {
   579  		// Void ops (inline marks) must stay.
   580  		return false
   581  	}
   582  	if opcodeTable[v.Op].nilCheck {
   583  		// Nil pointer checks must stay.
   584  		return false
   585  	}
   586  	if v.Type.IsMemory() {
   587  		// We don't need to preserve all memory ops, but we do need
   588  		// to keep calls at least (because they might have
   589  		// synchronization operations we can't see).
   590  		return false
   591  	}
   592  	if v.Op.HasSideEffects() {
   593  		// These are mostly synchronization operations.
   594  		return false
   595  	}
   596  	return true
   597  }
   598  
   599  // AutoVar returns a *Name and int64 representing the auto variable and offset within it
   600  // where v should be spilled.
   601  func AutoVar(v *Value) (*ir.Name, int64) {
   602  	if loc, ok := v.Block.Func.RegAlloc[v.ID].(LocalSlot); ok {
   603  		if v.Type.Size() > loc.Type.Size() {
   604  			v.Fatalf("v%d: spill/restore type %v doesn't fit in slot type %v", v.ID, v.Type, loc.Type)
   605  		}
   606  		return loc.N, loc.Off
   607  	}
   608  	// Assume it is a register; return its spill slot, which needs to be live.
   609  	nameOff := v.Aux.(*AuxNameOffset)
   610  	return nameOff.Name, nameOff.Offset
   611  }
   612  
   613  // CanSSA reports whether values of type t can be represented as a Value.
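        // For example, scalars, pointers, and small structs of SSA-able fields qualify,
        // while arrays with more than one element do not (dynamic indexing is not supported).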
   614  func CanSSA(t *types.Type) bool {
   615  	types.CalcSize(t)
   616  	if t.IsSIMD() {
   617  		return true
   618  	}
   619  	sizeLimit := int64(MaxStruct * types.PtrSize)
   620  	if t.Size() > sizeLimit {
   621  		// sizeLimit (MaxStruct*PtrSize) is an arbitrary constant. We want it
   622  		// to be at least 3*PtrSize so slices can be registerized.
   623  		// Too big and we'll introduce too much register pressure.
   624  		if !buildcfg.Experiment.SIMD {
   625  			return false
   626  		}
   627  	}
   628  	switch t.Kind() {
   629  	case types.TARRAY:
   630  		// We can't do larger arrays because dynamic indexing is
   631  		// not supported on SSA variables.
   632  		// TODO: allow if all indexes are constant.
   633  		if t.NumElem() <= 1 {
   634  			return CanSSA(t.Elem())
   635  		}
   636  		return false
   637  	case types.TSTRUCT:
   638  		if t.NumFields() > MaxStruct {
   639  			return false
   640  		}
   641  		for _, t1 := range t.Fields() {
   642  			if !CanSSA(t1.Type) {
   643  				return false
   644  			}
   645  		}
   646  		// Special check for SIMD. If the composite type
   647  		// contains SIMD vectors, we can return true
   648  		// if it passes the checks below.
   649  		if !buildcfg.Experiment.SIMD {
   650  			return true
   651  		}
   652  		if t.Size() <= sizeLimit {
   653  			return true
   654  		}
   655  		i, f := t.Registers()
   656  		return i+f <= MaxStruct
   657  	default:
   658  		return true
   659  	}
   660  }
   661  
