Source file src/cmd/compile/internal/walk/order.go

     1  // Copyright 2012 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/buildcfg"
    11  
    12  	"cmd/compile/internal/base"
    13  	"cmd/compile/internal/ir"
    14  	"cmd/compile/internal/reflectdata"
    15  	"cmd/compile/internal/ssa"
    16  	"cmd/compile/internal/staticinit"
    17  	"cmd/compile/internal/typecheck"
    18  	"cmd/compile/internal/types"
    19  	"cmd/internal/objabi"
    20  	"cmd/internal/src"
    21  )
    22  
    23  // Rewrite tree to use separate statements to enforce
    24  // order of evaluation. Makes walk easier, because it
    25  // can (after this runs) reorder at will within an expression.
    26  //
    27  // Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
    28  //
    29  // Introduce temporaries as needed by runtime routines.
    30  // For example, the map runtime routines take the map key
    31  // by reference, so make sure all map keys are addressable
    32  // by copying them to temporaries as needed.
    33  // The same is true for channel operations.
    34  //
    35  // Arrange that map index expressions only appear in direct
    36  // assignments x = m[k] or m[k] = x, never in larger expressions.
    37  //
    38  // Arrange that receive expressions only appear in direct assignments
    39  // x = <-c or as standalone statements <-c, never in larger expressions.
    40  
// orderState holds state during the ordering process.
// A new orderState is created per block (see orderBlock), sharing only
// the free list of temporaries with its parent so temps can be reused.
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables; popTemp recycles them onto free
	free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}
    48  
    49  // order rewrites fn.Nbody to apply the ordering constraints
    50  // described in the comment at the top of the file.
    51  func order(fn *ir.Func) {
    52  	if base.Flag.W > 1 {
    53  		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
    54  		ir.DumpList(s, fn.Body)
    55  	}
    56  	ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.
    57  	orderBlock(&fn.Body, map[string][]*ir.Name{})
    58  }
    59  
    60  // append typechecks stmt and appends it to out.
    61  func (o *orderState) append(stmt ir.Node) {
    62  	o.out = append(o.out, typecheck.Stmt(stmt))
    63  }
    64  
    65  // newTemp allocates a new temporary with the given type,
    66  // pushes it onto the temp stack, and returns it.
    67  // If clear is true, newTemp emits code to zero the temporary.
    68  func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
    69  	var v *ir.Name
    70  	key := t.LinkString()
    71  	if a := o.free[key]; len(a) > 0 {
    72  		v = a[len(a)-1]
    73  		if !types.Identical(t, v.Type()) {
    74  			base.Fatalf("expected %L to have type %v", v, t)
    75  		}
    76  		o.free[key] = a[:len(a)-1]
    77  	} else {
    78  		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
    79  	}
    80  	if clear {
    81  		o.append(ir.NewAssignStmt(base.Pos, v, nil))
    82  	}
    83  
    84  	o.temp = append(o.temp, v)
    85  	return v
    86  }
    87  
    88  // copyExpr behaves like newTemp but also emits
    89  // code to initialize the temporary to the value n.
    90  func (o *orderState) copyExpr(n ir.Node) *ir.Name {
    91  	return o.copyExpr1(n, false)
    92  }
    93  
    94  // copyExprClear is like copyExpr but clears the temp before assignment.
    95  // It is provided for use when the evaluation of tmp = n turns into
    96  // a function call that is passed a pointer to the temporary as the output space.
    97  // If the call blocks before tmp has been written,
    98  // the garbage collector will still treat the temporary as live,
    99  // so we must zero it before entering that call.
   100  // Today, this only happens for channel receive operations.
   101  // (The other candidate would be map access, but map access
   102  // returns a pointer to the result data instead of taking a pointer
   103  // to be filled in.)
   104  func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
   105  	return o.copyExpr1(n, true)
   106  }
   107  
   108  func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
   109  	t := n.Type()
   110  	v := o.newTemp(t, clear)
   111  	o.append(ir.NewAssignStmt(base.Pos, v, n))
   112  	return v
   113  }
   114  
   115  // cheapExpr returns a cheap version of n.
   116  // The definition of cheap is that n is a variable or constant.
   117  // If not, cheapExpr allocates a new tmp, emits tmp = n,
   118  // and then returns tmp.
   119  func (o *orderState) cheapExpr(n ir.Node) ir.Node {
   120  	if n == nil {
   121  		return nil
   122  	}
   123  
   124  	switch n.Op() {
   125  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   126  		return n
   127  	case ir.OLEN, ir.OCAP:
   128  		n := n.(*ir.UnaryExpr)
   129  		l := o.cheapExpr(n.X)
   130  		if l == n.X {
   131  			return n
   132  		}
   133  		a := ir.Copy(n).(*ir.UnaryExpr)
   134  		a.X = l
   135  		return typecheck.Expr(a)
   136  	}
   137  
   138  	return o.copyExpr(n)
   139  }
   140  
   141  // safeExpr returns a safe version of n.
   142  // The definition of safe is that n can appear multiple times
   143  // without violating the semantics of the original program,
   144  // and that assigning to the safe version has the same effect
   145  // as assigning to the original n.
   146  //
   147  // The intended use is to apply to x when rewriting x += y into x = x + y.
   148  func (o *orderState) safeExpr(n ir.Node) ir.Node {
   149  	switch n.Op() {
   150  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   151  		return n
   152  
   153  	case ir.OLEN, ir.OCAP:
   154  		n := n.(*ir.UnaryExpr)
   155  		l := o.safeExpr(n.X)
   156  		if l == n.X {
   157  			return n
   158  		}
   159  		a := ir.Copy(n).(*ir.UnaryExpr)
   160  		a.X = l
   161  		return typecheck.Expr(a)
   162  
   163  	case ir.ODOT:
   164  		n := n.(*ir.SelectorExpr)
   165  		l := o.safeExpr(n.X)
   166  		if l == n.X {
   167  			return n
   168  		}
   169  		a := ir.Copy(n).(*ir.SelectorExpr)
   170  		a.X = l
   171  		return typecheck.Expr(a)
   172  
   173  	case ir.ODOTPTR:
   174  		n := n.(*ir.SelectorExpr)
   175  		l := o.cheapExpr(n.X)
   176  		if l == n.X {
   177  			return n
   178  		}
   179  		a := ir.Copy(n).(*ir.SelectorExpr)
   180  		a.X = l
   181  		return typecheck.Expr(a)
   182  
   183  	case ir.ODEREF:
   184  		n := n.(*ir.StarExpr)
   185  		l := o.cheapExpr(n.X)
   186  		if l == n.X {
   187  			return n
   188  		}
   189  		a := ir.Copy(n).(*ir.StarExpr)
   190  		a.X = l
   191  		return typecheck.Expr(a)
   192  
   193  	case ir.OINDEX, ir.OINDEXMAP:
   194  		n := n.(*ir.IndexExpr)
   195  		var l ir.Node
   196  		if n.X.Type().IsArray() {
   197  			l = o.safeExpr(n.X)
   198  		} else {
   199  			l = o.cheapExpr(n.X)
   200  		}
   201  		r := o.cheapExpr(n.Index)
   202  		if l == n.X && r == n.Index {
   203  			return n
   204  		}
   205  		a := ir.Copy(n).(*ir.IndexExpr)
   206  		a.X = l
   207  		a.Index = r
   208  		return typecheck.Expr(a)
   209  
   210  	default:
   211  		base.Fatalf("order.safeExpr %v", n.Op())
   212  		return nil // not reached
   213  	}
   214  }
   215  
   216  // addrTemp ensures that n is okay to pass by address to runtime routines.
   217  // If the original argument n is not okay, addrTemp creates a tmp, emits
   218  // tmp = n, and then returns tmp.
   219  // The result of addrTemp MUST be assigned back to n, e.g.
   220  //
   221  //	n.Left = o.addrTemp(n.Left)
   222  func (o *orderState) addrTemp(n ir.Node) ir.Node {
   223  	// Note: Avoid addrTemp with static assignment for literal strings
   224  	// when compiling FIPS packages.
   225  	// The problem is that panic("foo") ends up creating a static RODATA temp
   226  	// for the implicit conversion of "foo" to any, and we can't handle
   227  	// the relocations in that temp.
   228  	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
   229  		// TODO: expand this to all static composite literal nodes?
   230  		n = typecheck.DefaultLit(n, nil)
   231  		types.CalcSize(n.Type())
   232  		vstat := readonlystaticname(n.Type())
   233  		var s staticinit.Schedule
   234  		s.StaticAssign(vstat, 0, n, n.Type())
   235  		if s.Out != nil {
   236  			base.Fatalf("staticassign of const generated code: %+v", n)
   237  		}
   238  		vstat = typecheck.Expr(vstat).(*ir.Name)
   239  		return vstat
   240  	}
   241  
   242  	// Prevent taking the address of an SSA-able local variable (#63332).
   243  	//
   244  	// TODO(mdempsky): Note that OuterValue unwraps OCONVNOPs, but
   245  	// IsAddressable does not. It should be possible to skip copying for
   246  	// at least some of these OCONVNOPs (e.g., reinsert them after the
   247  	// OADDR operation), but at least walkCompare needs to be fixed to
   248  	// support that (see trybot failures on go.dev/cl/541715, PS1).
   249  	if ir.IsAddressable(n) {
   250  		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
   251  			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
   252  				goto Copy
   253  			}
   254  		}
   255  
   256  		return n
   257  	}
   258  
   259  Copy:
   260  	return o.copyExpr(n)
   261  }
   262  
// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// For slow map key types it makes n addressable (the runtime takes the key
// by reference); for the map*_fast* variants it converts n to the exact
// runtime key type instead, since those take the key by value.
// The first parameter is the position of n's containing node, for use in case
// that n's position is not unique (e.g., if n is an ONAME).
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: map*_fast* calls. See golang.org/issue/19015.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	// kt is the runtime key type expected by the selected fast variant.
	// It is always set below: every non-slow alg has a case here.
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		// Already exactly the runtime key type; nothing to do.
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named type to underlying type, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error: reinterpret the signed
			// constant's bits as unsigned rather than converting.
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory.
		// We'll need to do a load with type kt. Create a temporary of type kt to
		// ensure sufficient alignment. nt may be under-aligned.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}
   320  
   321  // mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
   322  // in n to avoid string allocations for keys in map lookups.
   323  // Returns a bool that signals if a modification was made.
   324  //
   325  // For:
   326  //
   327  //	x = m[string(k)]
   328  //	x = m[T1{... Tn{..., string(k), ...}}]
   329  //
   330  // where k is []byte, T1 to Tn is a nesting of struct and array literals,
   331  // the allocation of backing bytes for the string can be avoided
   332  // by reusing the []byte backing array. These are special cases
   333  // for avoiding allocations when converting byte slices to strings.
   334  // It would be nice to handle these generally, but because
   335  // []byte keys are not allowed in maps, the use of string(k)
   336  // comes up in important cases in practice. See issue 3512.
   337  //
   338  // Note that this code does not handle the case:
   339  //
   340  //      s := string(k)
   341  //      x = m[s]
   342  //
   343  // Cases like this are handled during SSA, search for slicebytetostring
   344  // in ../ssa/_gen/generic.rules.
   345  func mapKeyReplaceStrConv(n ir.Node) bool {
   346  	var replaced bool
   347  	switch n.Op() {
   348  	case ir.OBYTES2STR:
   349  		n := n.(*ir.ConvExpr)
   350  		n.SetOp(ir.OBYTES2STRTMP)
   351  		replaced = true
   352  	case ir.OSTRUCTLIT:
   353  		n := n.(*ir.CompLitExpr)
   354  		for _, elem := range n.List {
   355  			elem := elem.(*ir.StructKeyExpr)
   356  			if mapKeyReplaceStrConv(elem.Value) {
   357  				replaced = true
   358  			}
   359  		}
   360  	case ir.OARRAYLIT:
   361  		n := n.(*ir.CompLitExpr)
   362  		for _, elem := range n.List {
   363  			if elem.Op() == ir.OKEY {
   364  				elem = elem.(*ir.KeyExpr).Value
   365  			}
   366  			if mapKeyReplaceStrConv(elem) {
   367  				replaced = true
   368  			}
   369  		}
   370  	}
   371  	return replaced
   372  }
   373  
// ordermarker records a position in the temporary-variable stack,
// as returned by markTemp and later consumed by popTemp.
type ordermarker int

// markTemp returns the top of the temporary variable stack.
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}
   380  
   381  // popTemp pops temporaries off the stack until reaching the mark,
   382  // which must have been returned by markTemp.
   383  func (o *orderState) popTemp(mark ordermarker) {
   384  	for _, n := range o.temp[mark:] {
   385  		key := n.Type().LinkString()
   386  		o.free[key] = append(o.free[key], n)
   387  	}
   388  	o.temp = o.temp[:mark]
   389  }
   390  
   391  // stmtList orders each of the statements in the list.
   392  func (o *orderState) stmtList(l ir.Nodes) {
   393  	s := l
   394  	for i := range s {
   395  		orderMakeSliceCopy(s[i:])
   396  		o.stmt(s[i])
   397  	}
   398  }
   399  
// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
	// Skip the fusion when optimizations are disabled or when
	// instrumenting (the separate copy call must stay visible).
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	// Need an adjacent pair: an OAS immediately followed by an OCOPY.
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The line above this one is correct with the differing equality operators:
		// we want as.X and cp.X to be the same name,
		// but we want the initial data to be coming from a different name.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	// Only fuse escaping makeslice with an explicit length and no cap.
	// NOTE(review): the EscNone exclusion presumably matches a separate
	// stack-allocation lowering for non-escaping slices — confirm in walk.
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove separate copy call
}
   437  
   438  // edge inserts coverage instrumentation for libfuzzer.
   439  func (o *orderState) edge() {
   440  	if base.Debug.Libfuzzer == 0 {
   441  		return
   442  	}
   443  
   444  	// Create a new uint8 counter to be allocated in section __sancov_cntrs
   445  	counter := staticinit.StaticName(types.Types[types.TUINT8])
   446  	counter.SetLibfuzzer8BitCounter(true)
   447  	// As well as setting SetLibfuzzer8BitCounter, we preemptively set the
   448  	// symbol type to SLIBFUZZER_8BIT_COUNTER so that the race detector
   449  	// instrumentation pass (which does not have access to the flags set by
   450  	// SetLibfuzzer8BitCounter) knows to ignore them. This information is
   451  	// lost by the time it reaches the compile step, so SetLibfuzzer8BitCounter
   452  	// is still necessary.
   453  	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER
   454  
   455  	// We guarantee that the counter never becomes zero again once it has been
   456  	// incremented once. This implementation follows the NeverZero optimization
   457  	// presented by the paper:
   458  	// "AFL++: Combining Incremental Steps of Fuzzing Research"
   459  	// The NeverZero policy avoids the overflow to 0 by setting the counter to one
   460  	// after it reaches 255 and so, if an edge is executed at least one time, the entry is
   461  	// never 0.
   462  	// Another policy presented in the paper is the Saturated Counters policy which
   463  	// freezes the counter when it reaches the value of 255. However, a range
   464  	// of experiments showed that doing so decreases overall performance.
   465  	o.append(ir.NewIfStmt(base.Pos,
   466  		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
   467  		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
   468  		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
   469  }
   470  
   471  // orderBlock orders the block of statements in n into a new slice,
   472  // and then replaces the old slice in n with the new slice.
   473  // free is a map that can be used to obtain temporary variables by type.
   474  func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
   475  	if len(*n) != 0 {
   476  		// Set reasonable position for instrumenting code. See issue 53688.
   477  		// It would be nice if ir.Nodes had a position (the opening {, probably),
   478  		// but it doesn't. So we use the first statement's position instead.
   479  		ir.SetPos((*n)[0])
   480  	}
   481  	var order orderState
   482  	order.free = free
   483  	mark := order.markTemp()
   484  	order.edge()
   485  	order.stmtList(*n)
   486  	order.popTemp(mark)
   487  	*n = order.out
   488  }
   489  
   490  // exprInPlace orders the side effects in *np and
   491  // leaves them as the init list of the final *np.
   492  // The result of exprInPlace MUST be assigned back to n, e.g.
   493  //
   494  //	n.Left = o.exprInPlace(n.Left)
   495  func (o *orderState) exprInPlace(n ir.Node) ir.Node {
   496  	var order orderState
   497  	order.free = o.free
   498  	n = order.expr(n, nil)
   499  	n = ir.InitExpr(order.out, n)
   500  
   501  	// insert new temporaries from order
   502  	// at head of outer list.
   503  	o.temp = append(o.temp, order.temp...)
   504  	return n
   505  }
   506  
   507  // orderStmtInPlace orders the side effects of the single statement *np
   508  // and replaces it with the resulting statement list.
   509  // The result of orderStmtInPlace MUST be assigned back to n, e.g.
   510  //
   511  //	n.Left = orderStmtInPlace(n.Left)
   512  //
   513  // free is a map that can be used to obtain temporary variables by type.
   514  func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
   515  	var order orderState
   516  	order.free = free
   517  	mark := order.markTemp()
   518  	order.stmt(n)
   519  	order.popTemp(mark)
   520  	return ir.NewBlockStmt(src.NoXPos, order.out)
   521  }
   522  
   523  // init moves n's init list to o.out.
   524  func (o *orderState) init(n ir.Node) {
   525  	if ir.MayBeShared(n) {
   526  		// For concurrency safety, don't mutate potentially shared nodes.
   527  		// First, ensure that no work is required here.
   528  		if len(n.Init()) > 0 {
   529  			base.Fatalf("order.init shared node with ninit")
   530  		}
   531  		return
   532  	}
   533  	o.stmtList(ir.TakeInit(n))
   534  }
   535  
   536  // call orders the call expression n.
   537  // n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
   538  func (o *orderState) call(nn ir.Node) {
   539  	if len(nn.Init()) > 0 {
   540  		// Caller should have already called o.init(nn).
   541  		base.Fatalf("%v with unexpected ninit", nn.Op())
   542  	}
   543  	if nn.Op() == ir.OCALLMETH {
   544  		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
   545  	}
   546  
   547  	// Builtin functions.
   548  	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
   549  		switch n := nn.(type) {
   550  		default:
   551  			base.Fatalf("unexpected call: %+v", n)
   552  		case *ir.UnaryExpr:
   553  			n.X = o.expr(n.X, nil)
   554  		case *ir.ConvExpr:
   555  			n.X = o.expr(n.X, nil)
   556  		case *ir.BinaryExpr:
   557  			n.X = o.expr(n.X, nil)
   558  			n.Y = o.expr(n.Y, nil)
   559  		case *ir.MakeExpr:
   560  			n.Len = o.expr(n.Len, nil)
   561  			n.Cap = o.expr(n.Cap, nil)
   562  		case *ir.CallExpr:
   563  			o.exprList(n.Args)
   564  		}
   565  		return
   566  	}
   567  
   568  	n := nn.(*ir.CallExpr)
   569  	typecheck.AssertFixedCall(n)
   570  
   571  	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
   572  		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
   573  		// do not introduce temporaries here, so it is easier to rewrite it
   574  		// to symbol address reference later in walk.
   575  		return
   576  	}
   577  
   578  	n.Fun = o.expr(n.Fun, nil)
   579  	o.exprList(n.Args)
   580  }
   581  
   582  // mapAssign appends n to o.out.
   583  func (o *orderState) mapAssign(n ir.Node) {
   584  	switch n.Op() {
   585  	default:
   586  		base.Fatalf("order.mapAssign %v", n.Op())
   587  
   588  	case ir.OAS:
   589  		n := n.(*ir.AssignStmt)
   590  		if n.X.Op() == ir.OINDEXMAP {
   591  			n.Y = o.safeMapRHS(n.Y)
   592  		}
   593  		o.out = append(o.out, n)
   594  	case ir.OASOP:
   595  		n := n.(*ir.AssignOpStmt)
   596  		if n.X.Op() == ir.OINDEXMAP {
   597  			n.Y = o.safeMapRHS(n.Y)
   598  		}
   599  		o.out = append(o.out, n)
   600  	}
   601  }
   602  
   603  func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
   604  	// Make sure we evaluate the RHS before starting the map insert.
   605  	// We need to make sure the RHS won't panic.  See issue 22881.
   606  	if r.Op() == ir.OAPPEND {
   607  		r := r.(*ir.CallExpr)
   608  		s := r.Args[1:]
   609  		for i, n := range s {
   610  			s[i] = o.cheapExpr(n)
   611  		}
   612  		return r
   613  	}
   614  	return o.cheapExpr(r)
   615  }
   616  
   617  // stmt orders the statement n, appending to o.out.
   618  func (o *orderState) stmt(n ir.Node) {
   619  	if n == nil {
   620  		return
   621  	}
   622  
   623  	lno := ir.SetPos(n)
   624  	o.init(n)
   625  
   626  	switch n.Op() {
   627  	default:
   628  		base.Fatalf("order.stmt %v", n.Op())
   629  
   630  	case ir.OINLMARK:
   631  		o.out = append(o.out, n)
   632  
   633  	case ir.OAS:
   634  		n := n.(*ir.AssignStmt)
   635  		t := o.markTemp()
   636  
   637  		// There's a delicate interaction here between two OINDEXMAP
   638  		// optimizations.
   639  		//
   640  		// First, we want to handle m[k] = append(m[k], ...) with a single
   641  		// runtime call to mapassign. This requires the m[k] expressions to
   642  		// satisfy ir.SameSafeExpr in walkAssign.
   643  		//
   644  		// But if k is a slow map key type that's passed by reference (e.g.,
   645  		// byte), then we want to avoid marking user variables as addrtaken,
   646  		// if that might prevent the compiler from keeping k in a register.
   647  		//
   648  		// TODO(mdempsky): It would be better if walk was responsible for
   649  		// inserting temporaries as needed.
   650  		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
   651  			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])
   652  
   653  		n.X = o.expr(n.X, nil)
   654  		if mapAppend {
   655  			indexLHS := n.X.(*ir.IndexExpr)
   656  			indexLHS.X = o.cheapExpr(indexLHS.X)
   657  			indexLHS.Index = o.cheapExpr(indexLHS.Index)
   658  
   659  			call := n.Y.(*ir.CallExpr)
   660  			arg0 := call.Args[0]
   661  			// ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096).
   662  			for arg0.Op() == ir.OCONVNOP {
   663  				arg0 = arg0.(*ir.ConvExpr).X
   664  			}
   665  			indexRHS := arg0.(*ir.IndexExpr)
   666  			indexRHS.X = indexLHS.X
   667  			indexRHS.Index = indexLHS.Index
   668  
   669  			o.exprList(call.Args[1:])
   670  		} else {
   671  			n.Y = o.expr(n.Y, n.X)
   672  		}
   673  		o.mapAssign(n)
   674  		o.popTemp(t)
   675  
   676  	case ir.OASOP:
   677  		n := n.(*ir.AssignOpStmt)
   678  		t := o.markTemp()
   679  		n.X = o.expr(n.X, nil)
   680  		n.Y = o.expr(n.Y, nil)
   681  
   682  		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
   683  			// Rewrite m[k] op= r into m[k] = m[k] op r so
   684  			// that we can ensure that if op panics
   685  			// because r is zero, the panic happens before
   686  			// the map assignment.
   687  			// DeepCopy is a big hammer here, but safeExpr
   688  			// makes sure there is nothing too deep being copied.
   689  			l1 := o.safeExpr(n.X)
   690  			l2 := ir.DeepCopy(src.NoXPos, l1)
   691  			if l2.Op() == ir.OINDEXMAP {
   692  				l2 := l2.(*ir.IndexExpr)
   693  				l2.Assigned = false
   694  			}
   695  			l2 = o.copyExpr(l2)
   696  			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
   697  			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
   698  			o.mapAssign(as)
   699  			o.popTemp(t)
   700  			return
   701  		}
   702  
   703  		o.mapAssign(n)
   704  		o.popTemp(t)
   705  
   706  	case ir.OAS2:
   707  		n := n.(*ir.AssignListStmt)
   708  		t := o.markTemp()
   709  		o.exprList(n.Lhs)
   710  		o.exprList(n.Rhs)
   711  		o.out = append(o.out, n)
   712  		o.popTemp(t)
   713  
   714  	// Special: avoid copy of func call n.Right
   715  	case ir.OAS2FUNC:
   716  		n := n.(*ir.AssignListStmt)
   717  		t := o.markTemp()
   718  		o.exprList(n.Lhs)
   719  		call := n.Rhs[0]
   720  		o.init(call)
   721  		if ic, ok := call.(*ir.InlinedCallExpr); ok {
   722  			o.stmtList(ic.Body)
   723  
   724  			n.SetOp(ir.OAS2)
   725  			n.Rhs = ic.ReturnVars
   726  
   727  			o.exprList(n.Rhs)
   728  			o.out = append(o.out, n)
   729  		} else {
   730  			o.call(call)
   731  			o.as2func(n)
   732  		}
   733  		o.popTemp(t)
   734  
   735  	// Special: use temporary variables to hold result,
   736  	// so that runtime can take address of temporary.
   737  	// No temporary for blank assignment.
   738  	//
   739  	// OAS2MAPR: make sure key is addressable if needed,
   740  	//           and make sure OINDEXMAP is not copied out.
   741  	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
   742  		n := n.(*ir.AssignListStmt)
   743  		t := o.markTemp()
   744  		o.exprList(n.Lhs)
   745  
   746  		switch r := n.Rhs[0]; r.Op() {
   747  		case ir.ODOTTYPE2:
   748  			r := r.(*ir.TypeAssertExpr)
   749  			r.X = o.expr(r.X, nil)
   750  		case ir.ODYNAMICDOTTYPE2:
   751  			r := r.(*ir.DynamicTypeAssertExpr)
   752  			r.X = o.expr(r.X, nil)
   753  			r.RType = o.expr(r.RType, nil)
   754  			r.ITab = o.expr(r.ITab, nil)
   755  		case ir.ORECV:
   756  			r := r.(*ir.UnaryExpr)
   757  			r.X = o.expr(r.X, nil)
   758  		case ir.OINDEXMAP:
   759  			r := r.(*ir.IndexExpr)
   760  			r.X = o.expr(r.X, nil)
   761  			r.Index = o.expr(r.Index, nil)
   762  			// See similar conversion for OINDEXMAP below.
   763  			_ = mapKeyReplaceStrConv(r.Index)
   764  			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
   765  		default:
   766  			base.Fatalf("order.stmt: %v", r.Op())
   767  		}
   768  
   769  		o.as2ok(n)
   770  		o.popTemp(t)
   771  
   772  	// Special: does not save n onto out.
   773  	case ir.OBLOCK:
   774  		n := n.(*ir.BlockStmt)
   775  		o.stmtList(n.List)
   776  
   777  	// Special: n->left is not an expression; save as is.
   778  	case ir.OBREAK,
   779  		ir.OCONTINUE,
   780  		ir.ODCL,
   781  		ir.OFALL,
   782  		ir.OGOTO,
   783  		ir.OLABEL,
   784  		ir.OTAILCALL:
   785  		o.out = append(o.out, n)
   786  
   787  	// Special: handle call arguments.
   788  	case ir.OCALLFUNC, ir.OCALLINTER:
   789  		n := n.(*ir.CallExpr)
   790  		t := o.markTemp()
   791  		o.call(n)
   792  		o.out = append(o.out, n)
   793  		o.popTemp(t)
   794  
   795  	case ir.OINLCALL:
   796  		n := n.(*ir.InlinedCallExpr)
   797  		o.stmtList(n.Body)
   798  
   799  		// discard results; double-check for no side effects
   800  		for _, result := range n.ReturnVars {
   801  			if staticinit.AnySideEffects(result) {
   802  				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
   803  			}
   804  		}
   805  
   806  	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
   807  		n := n.(*ir.UnaryExpr)
   808  		t := o.markTemp()
   809  		n.X = o.expr(n.X, nil)
   810  		o.out = append(o.out, n)
   811  		o.popTemp(t)
   812  
   813  	case ir.OCOPY:
   814  		n := n.(*ir.BinaryExpr)
   815  		t := o.markTemp()
   816  		n.X = o.expr(n.X, nil)
   817  		n.Y = o.expr(n.Y, nil)
   818  		o.out = append(o.out, n)
   819  		o.popTemp(t)
   820  
   821  	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
   822  		n := n.(*ir.CallExpr)
   823  		t := o.markTemp()
   824  		o.call(n)
   825  		o.out = append(o.out, n)
   826  		o.popTemp(t)
   827  
   828  	// Special: order arguments to inner call but not call itself.
   829  	case ir.ODEFER, ir.OGO:
   830  		n := n.(*ir.GoDeferStmt)
   831  		t := o.markTemp()
   832  		o.init(n.Call)
   833  		o.call(n.Call)
   834  		o.out = append(o.out, n)
   835  		o.popTemp(t)
   836  
   837  	case ir.ODELETE:
   838  		n := n.(*ir.CallExpr)
   839  		t := o.markTemp()
   840  		n.Args[0] = o.expr(n.Args[0], nil)
   841  		n.Args[1] = o.expr(n.Args[1], nil)
   842  		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
   843  		o.out = append(o.out, n)
   844  		o.popTemp(t)
   845  
   846  	// Clean temporaries from condition evaluation at
   847  	// beginning of loop body and after for statement.
   848  	case ir.OFOR:
   849  		n := n.(*ir.ForStmt)
   850  		t := o.markTemp()
   851  		n.Cond = o.exprInPlace(n.Cond)
   852  		orderBlock(&n.Body, o.free)
   853  		n.Post = orderStmtInPlace(n.Post, o.free)
   854  		o.out = append(o.out, n)
   855  		o.popTemp(t)
   856  
   857  	// Clean temporaries from condition at
   858  	// beginning of both branches.
   859  	case ir.OIF:
   860  		n := n.(*ir.IfStmt)
   861  		t := o.markTemp()
   862  		n.Cond = o.exprInPlace(n.Cond)
   863  		o.popTemp(t)
   864  		orderBlock(&n.Body, o.free)
   865  		orderBlock(&n.Else, o.free)
   866  		o.out = append(o.out, n)
   867  
   868  	case ir.ORANGE:
   869  		// n.Right is the expression being ranged over.
   870  		// order it, and then make a copy if we need one.
   871  		// We almost always do, to ensure that we don't
   872  		// see any value changes made during the loop.
   873  		// Usually the copy is cheap (e.g., array pointer,
   874  		// chan, slice, string are all tiny).
   875  		// The exception is ranging over an array value
   876  		// (not a slice, not a pointer to array),
   877  		// which must make a copy to avoid seeing updates made during
   878  		// the range body. Ranging over an array value is uncommon though.
   879  
   880  		// Mark []byte(str) range expression to reuse string backing storage.
   881  		// It is safe because the storage cannot be mutated.
   882  		n := n.(*ir.RangeStmt)
   883  		if x, ok := n.X.(*ir.ConvExpr); ok {
   884  			switch x.Op() {
   885  			case ir.OSTR2BYTES:
   886  				x.SetOp(ir.OSTR2BYTESTMP)
   887  				fallthrough
   888  			case ir.OSTR2BYTESTMP:
   889  				x.MarkNonNil() // "range []byte(nil)" is fine
   890  			}
   891  		}
   892  
   893  		t := o.markTemp()
   894  		n.X = o.expr(n.X, nil)
   895  
   896  		orderBody := true
   897  		xt := typecheck.RangeExprType(n.X.Type())
   898  		switch k := xt.Kind(); {
   899  		default:
   900  			base.Fatalf("order.stmt range %v", n.Type())
   901  
   902  		case types.IsInt[k]:
   903  			// Used only once, no need to copy.
   904  
   905  		case k == types.TARRAY, k == types.TSLICE:
   906  			if n.Value == nil || ir.IsBlank(n.Value) {
   907  				// for i := range x will only use x once, to compute len(x).
   908  				// No need to copy it.
   909  				break
   910  			}
   911  			fallthrough
   912  
   913  		case k == types.TCHAN, k == types.TSTRING:
   914  			// chan, string, slice, array ranges use value multiple times.
   915  			// make copy.
   916  			r := n.X
   917  
   918  			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
   919  				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
   920  				r.SetType(types.Types[types.TSTRING])
   921  				r = typecheck.Expr(r)
   922  			}
   923  
   924  			n.X = o.copyExpr(r)
   925  
   926  		case k == types.TMAP:
   927  			if isMapClear(n) {
   928  				// Preserve the body of the map clear pattern so it can
   929  				// be detected during walk. The loop body will not be used
   930  				// when optimizing away the range loop to a runtime call.
   931  				orderBody = false
   932  				break
   933  			}
   934  
   935  			// copy the map value in case it is a map literal.
   936  			// TODO(rsc): Make tmp = literal expressions reuse tmp.
   937  			// For maps tmp is just one word so it hardly matters.
   938  			r := n.X
   939  			n.X = o.copyExpr(r)
   940  
   941  			// n.Prealloc is the temp for the iterator.
   942  			// MapIterType contains pointers and needs to be zeroed.
   943  			if buildcfg.Experiment.SwissMap {
   944  				n.Prealloc = o.newTemp(reflectdata.SwissMapIterType(), true)
   945  			} else {
   946  				n.Prealloc = o.newTemp(reflectdata.OldMapIterType(), true)
   947  			}
   948  		}
   949  		n.Key = o.exprInPlace(n.Key)
   950  		n.Value = o.exprInPlace(n.Value)
   951  		if orderBody {
   952  			orderBlock(&n.Body, o.free)
   953  		}
   954  		o.out = append(o.out, n)
   955  		o.popTemp(t)
   956  
   957  	case ir.ORETURN:
   958  		n := n.(*ir.ReturnStmt)
   959  		o.exprList(n.Results)
   960  		o.out = append(o.out, n)
   961  
   962  	// Special: clean case temporaries in each block entry.
   963  	// Select must enter one of its blocks, so there is no
   964  	// need for a cleaning at the end.
   965  	// Doubly special: evaluation order for select is stricter
   966  	// than ordinary expressions. Even something like p.c
   967  	// has to be hoisted into a temporary, so that it cannot be
   968  	// reordered after the channel evaluation for a different
   969  	// case (if p were nil, then the timing of the fault would
   970  	// give this away).
   971  	case ir.OSELECT:
   972  		n := n.(*ir.SelectStmt)
   973  		t := o.markTemp()
   974  		for _, ncas := range n.Cases {
   975  			r := ncas.Comm
   976  			ir.SetPos(ncas)
   977  
   978  			// Append any new body prologue to ninit.
   979  			// The next loop will insert ninit into nbody.
   980  			if len(ncas.Init()) != 0 {
   981  				base.Fatalf("order select ninit")
   982  			}
   983  			if r == nil {
   984  				continue
   985  			}
   986  			switch r.Op() {
   987  			default:
   988  				ir.Dump("select case", r)
   989  				base.Fatalf("unknown op in select %v", r.Op())
   990  
   991  			case ir.OSELRECV2:
   992  				// case x, ok = <-c
   993  				r := r.(*ir.AssignListStmt)
   994  				recv := r.Rhs[0].(*ir.UnaryExpr)
   995  				recv.X = o.expr(recv.X, nil)
   996  				if !ir.IsAutoTmp(recv.X) {
   997  					recv.X = o.copyExpr(recv.X)
   998  				}
   999  				init := ir.TakeInit(r)
  1000  
  1001  				colas := r.Def
  1002  				do := func(i int, t *types.Type) {
  1003  					n := r.Lhs[i]
  1004  					if ir.IsBlank(n) {
  1005  						return
  1006  					}
  1007  					// If this is case x := <-ch or case x, y := <-ch, the case has
  1008  					// the ODCL nodes to declare x and y. We want to delay that
  1009  					// declaration (and possible allocation) until inside the case body.
  1010  					// Delete the ODCL nodes here and recreate them inside the body below.
  1011  					if colas {
  1012  						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
  1013  							init = init[1:]
  1014  
  1015  							// iimport may have added a default initialization assignment,
  1016  							// due to how it handles ODCL statements.
  1017  							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
  1018  								init = init[1:]
  1019  							}
  1020  						}
  1021  						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1022  						ncas.PtrInit().Append(dcl)
  1023  					}
  1024  					tmp := o.newTemp(t, t.HasPointers())
  1025  					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
  1026  					ncas.PtrInit().Append(as)
  1027  					r.Lhs[i] = tmp
  1028  				}
  1029  				do(0, recv.X.Type().Elem())
  1030  				do(1, types.Types[types.TBOOL])
  1031  				if len(init) != 0 {
  1032  					ir.DumpList("ninit", init)
  1033  					base.Fatalf("ninit on select recv")
  1034  				}
  1035  				orderBlock(ncas.PtrInit(), o.free)
  1036  
  1037  			case ir.OSEND:
  1038  				r := r.(*ir.SendStmt)
  1039  				if len(r.Init()) != 0 {
  1040  					ir.DumpList("ninit", r.Init())
  1041  					base.Fatalf("ninit on select send")
  1042  				}
  1043  
  1044  				// case c <- x
  1045  				// r->left is c, r->right is x, both are always evaluated.
  1046  				r.Chan = o.expr(r.Chan, nil)
  1047  
  1048  				if !ir.IsAutoTmp(r.Chan) {
  1049  					r.Chan = o.copyExpr(r.Chan)
  1050  				}
  1051  				r.Value = o.expr(r.Value, nil)
  1052  				if !ir.IsAutoTmp(r.Value) {
  1053  					r.Value = o.copyExpr(r.Value)
  1054  				}
  1055  			}
  1056  		}
  1057  		// Now that we have accumulated all the temporaries, clean them.
  1058  		// Also insert any ninit queued during the previous loop.
  1059  		// (The temporary cleaning must follow that ninit work.)
  1060  		for _, cas := range n.Cases {
  1061  			orderBlock(&cas.Body, o.free)
  1062  
  1063  			// TODO(mdempsky): Is this actually necessary?
  1064  			// walkSelect appears to walk Ninit.
  1065  			cas.Body.Prepend(ir.TakeInit(cas)...)
  1066  		}
  1067  
  1068  		o.out = append(o.out, n)
  1069  		o.popTemp(t)
  1070  
  1071  	// Special: value being sent is passed as a pointer; make it addressable.
  1072  	case ir.OSEND:
  1073  		n := n.(*ir.SendStmt)
  1074  		t := o.markTemp()
  1075  		n.Chan = o.expr(n.Chan, nil)
  1076  		n.Value = o.expr(n.Value, nil)
  1077  		if base.Flag.Cfg.Instrumenting {
  1078  			// Force copying to the stack so that (chan T)(nil) <- x
  1079  			// is still instrumented as a read of x.
  1080  			n.Value = o.copyExpr(n.Value)
  1081  		} else {
  1082  			n.Value = o.addrTemp(n.Value)
  1083  		}
  1084  		o.out = append(o.out, n)
  1085  		o.popTemp(t)
  1086  
  1087  	// TODO(rsc): Clean temporaries more aggressively.
  1088  	// Note that because walkSwitch will rewrite some of the
  1089  	// switch into a binary search, this is not as easy as it looks.
  1090  	// (If we ran that code here we could invoke order.stmt on
  1091  	// the if-else chain instead.)
  1092  	// For now just clean all the temporaries at the end.
  1093  	// In practice that's fine.
  1094  	case ir.OSWITCH:
  1095  		n := n.(*ir.SwitchStmt)
  1096  		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
  1097  			// Add empty "default:" case for instrumentation.
  1098  			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
  1099  		}
  1100  
  1101  		t := o.markTemp()
  1102  		n.Tag = o.expr(n.Tag, nil)
  1103  		for _, ncas := range n.Cases {
  1104  			o.exprListInPlace(ncas.List)
  1105  			orderBlock(&ncas.Body, o.free)
  1106  		}
  1107  
  1108  		o.out = append(o.out, n)
  1109  		o.popTemp(t)
  1110  	}
  1111  
  1112  	base.Pos = lno
  1113  }
  1114  
  1115  func hasDefaultCase(n *ir.SwitchStmt) bool {
  1116  	for _, ncas := range n.Cases {
  1117  		if len(ncas.List) == 0 {
  1118  			return true
  1119  		}
  1120  	}
  1121  	return false
  1122  }
  1123  
  1124  // exprList orders the expression list l into o.
  1125  func (o *orderState) exprList(l ir.Nodes) {
  1126  	s := l
  1127  	for i := range s {
  1128  		s[i] = o.expr(s[i], nil)
  1129  	}
  1130  }
  1131  
  1132  // exprListInPlace orders the expression list l but saves
  1133  // the side effects on the individual expression ninit lists.
  1134  func (o *orderState) exprListInPlace(l ir.Nodes) {
  1135  	s := l
  1136  	for i := range s {
  1137  		s[i] = o.exprInPlace(s[i])
  1138  	}
  1139  }
  1140  
// exprNoLHS is expr with no left-hand side. It exists so that a
// single closure over it can be cached in o.edit and reused as the
// callback for ir.EditChildren (see the default case of expr1).
func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}
  1144  
  1145  // expr orders a single expression, appending side
  1146  // effects to o.out as needed.
  1147  // If this is part of an assignment lhs = *np, lhs is given.
  1148  // Otherwise lhs == nil. (When lhs != nil it may be possible
  1149  // to avoid copying the result of the expression to a temporary.)
  1150  // The result of expr MUST be assigned back to n, e.g.
  1151  //
  1152  //	n.Left = o.expr(n.Left, lhs)
  1153  func (o *orderState) expr(n, lhs ir.Node) ir.Node {
  1154  	if n == nil {
  1155  		return n
  1156  	}
  1157  	lno := ir.SetPos(n)
  1158  	n = o.expr1(n, lhs)
  1159  	base.Pos = lno
  1160  	return n
  1161  }
  1162  
// expr1 does the work of expr for a single expression n: it appends
// the side effects of evaluating n to o.out and returns the
// expression to use in n's place — usually n itself, sometimes a
// freshly allocated temporary. lhs is as documented on expr.
// Every case must return for itself; there is no shared return path.
func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	// Hoist n's init list into o.out before examining n itself.
	o.init(n)

	switch n.Op() {
	default:
		// Generic expression: recursively order all children,
		// reusing a single cached closure as the editor callback.
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Fewer than 5 strings use direct runtime helpers.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse byteSlice backing
		// buffer during conversion. String concatenation does not
		// memorize the strings for later use, so it is safe.
		// However, we can do it only if there is at least one non-empty string literal.
		// Otherwise if all other arguments are empty strings,
		// concatstrings will return the reference to the temp string
		// to the caller.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// key may need to be addressable
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// We also process static composite literal node here, making a named static global
			// whose address we can put directly in an interface (see OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together. See golang.org/issue/15329.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r

		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		// Results of calls and call-like ops are copied to a temporary
		// unless they feed directly into a simple named variable.
		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		// Order the inlined body's statements; the call expression
		// itself reduces to its single result.
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...) .
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil) // order x
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil) // order y
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		// A transient closure that captures variables gets a
		// preallocated temporary for its storage.
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		// Channel receives are always hoisted into a cleared
		// temporary (copyExprClear).
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// buffer during conversion. String comparison does not
			// memorize the strings for later use, so it is safe.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// for complex comparisons, we need both args to be
			// addressable so we can pass them to the runtime.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//   map[int]int{
		//     a(): b(),
		//     c(): d(),
		//     e(): f(),
		//   }
		// to
		//   m := map[int]int{}
		//   m[a()] = b()
		//   m[c()] = d()
		//   m[e()] = f()
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		// See issue 26552.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them to dynamic;
			// e.g., OCONVIFACE nodes. See #31777.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include that count
		// in the map alloc hint.
		// We're assuming here that all the keys in the map literal are distinct.
		// If any are equal, this will be an overcount. Probably not worth accounting
		// for that, as equal keys in map literals are rare, and at worst we waste
		// a bit of space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}
  1518  
  1519  // as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
  1520  // The caller should order the right-hand side of the assignment before calling order.as2func.
  1521  // It rewrites,
  1522  //
  1523  //	a, b, a = ...
  1524  //
  1525  // as
  1526  //
  1527  //	tmp1, tmp2, tmp3 = ...
  1528  //	a, b, a = tmp1, tmp2, tmp3
  1529  //
  1530  // This is necessary to ensure left to right assignment order.
  1531  func (o *orderState) as2func(n *ir.AssignListStmt) {
  1532  	results := n.Rhs[0].Type()
  1533  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1534  	for i, nl := range n.Lhs {
  1535  		if !ir.IsBlank(nl) {
  1536  			typ := results.Field(i).Type
  1537  			tmp := o.newTemp(typ, typ.HasPointers())
  1538  			n.Lhs[i] = tmp
  1539  			as.Lhs = append(as.Lhs, nl)
  1540  			as.Rhs = append(as.Rhs, tmp)
  1541  		}
  1542  	}
  1543  
  1544  	o.out = append(o.out, n)
  1545  	o.stmt(typecheck.Stmt(as))
  1546  }
  1547  
  1548  // as2ok orders OAS2XXX with ok.
  1549  // Just like as2func, this also adds temporaries to ensure left-to-right assignment.
  1550  func (o *orderState) as2ok(n *ir.AssignListStmt) {
  1551  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1552  
  1553  	do := func(i int, typ *types.Type) {
  1554  		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
  1555  			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
  1556  			n.Lhs[i] = tmp
  1557  			as.Lhs = append(as.Lhs, nl)
  1558  			if i == 1 {
  1559  				// The "ok" result is an untyped boolean according to the Go
  1560  				// spec. We need to explicitly convert it to the LHS type in
  1561  				// case the latter is a defined boolean type (#8475).
  1562  				tmp = typecheck.Conv(tmp, nl.Type())
  1563  			}
  1564  			as.Rhs = append(as.Rhs, tmp)
  1565  		}
  1566  	}
  1567  
  1568  	do(0, n.Rhs[0].Type())
  1569  	do(1, types.Types[types.TBOOL])
  1570  
  1571  	o.out = append(o.out, n)
  1572  	o.stmt(typecheck.Stmt(as))
  1573  }
  1574  

View as plain text