Source file src/cmd/compile/internal/walk/order.go

     1  // Copyright 2012 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/buildcfg"
    11  
    12  	"cmd/compile/internal/base"
    13  	"cmd/compile/internal/ir"
    14  	"cmd/compile/internal/reflectdata"
    15  	"cmd/compile/internal/ssa"
    16  	"cmd/compile/internal/staticinit"
    17  	"cmd/compile/internal/typecheck"
    18  	"cmd/compile/internal/types"
    19  	"cmd/internal/objabi"
    20  	"cmd/internal/src"
    21  )
    22  
    23  // Rewrite tree to use separate statements to enforce
    24  // order of evaluation. Makes walk easier, because it
    25  // can (after this runs) reorder at will within an expression.
    26  //
    27  // Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
    28  //
    29  // Introduce temporaries as needed by runtime routines.
    30  // For example, the map runtime routines take the map key
    31  // by reference, so make sure all map keys are addressable
    32  // by copying them to temporaries as needed.
    33  // The same is true for channel operations.
    34  //
    35  // Arrange that map index expressions only appear in direct
    36  // assignments x = m[k] or m[k] = x, never in larger expressions.
    37  //
    38  // Arrange that receive expressions only appear in direct assignments
    39  // x = <-c or as standalone statements <-c, never in larger expressions.
    40  
// orderState holds state during the ordering process.
// It accumulates rewritten statements in out, and tracks the
// temporaries created along the way so they can be recycled
// (see markTemp/popTemp and the free map).
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables; popped back to a mark by popTemp
	free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}
    48  
    49  // order rewrites fn.Nbody to apply the ordering constraints
    50  // described in the comment at the top of the file.
    51  func order(fn *ir.Func) {
    52  	if base.Flag.W > 1 {
    53  		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
    54  		ir.DumpList(s, fn.Body)
    55  	}
    56  	ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.
    57  	orderBlock(&fn.Body, map[string][]*ir.Name{})
    58  }
    59  
    60  // append typechecks stmt and appends it to out.
    61  func (o *orderState) append(stmt ir.Node) {
    62  	o.out = append(o.out, typecheck.Stmt(stmt))
    63  }
    64  
    65  // newTemp allocates a new temporary with the given type,
    66  // pushes it onto the temp stack, and returns it.
    67  // If clear is true, newTemp emits code to zero the temporary.
    68  func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
    69  	var v *ir.Name
    70  	key := t.LinkString()
    71  	if a := o.free[key]; len(a) > 0 {
    72  		v = a[len(a)-1]
    73  		if !types.Identical(t, v.Type()) {
    74  			base.Fatalf("expected %L to have type %v", v, t)
    75  		}
    76  		o.free[key] = a[:len(a)-1]
    77  	} else {
    78  		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
    79  	}
    80  	if clear {
    81  		o.append(ir.NewAssignStmt(base.Pos, v, nil))
    82  	}
    83  
    84  	o.temp = append(o.temp, v)
    85  	return v
    86  }
    87  
// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.
// The temporary is not zeroed first; see copyExprClear for when that matters.
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
	return o.copyExpr1(n, false)
}
    93  
// copyExprClear is like copyExpr but clears the temp before assignment.
// It is provided for use when the evaluation of tmp = n turns into
// a function call that is passed a pointer to the temporary as the output space.
// If the call blocks before tmp has been written,
// the garbage collector will still treat the temporary as live,
// so we must zero it before entering that call.
// Today, this only happens for channel receive operations.
// (The other candidate would be map access, but map access
// returns a pointer to the result data instead of taking a pointer
// to be filled in.)
func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
	return o.copyExpr1(n, true)
}
   107  
   108  func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
   109  	t := n.Type()
   110  	v := o.newTemp(t, clear)
   111  	o.append(ir.NewAssignStmt(base.Pos, v, n))
   112  	return v
   113  }
   114  
   115  // cheapExpr returns a cheap version of n.
   116  // The definition of cheap is that n is a variable or constant.
   117  // If not, cheapExpr allocates a new tmp, emits tmp = n,
   118  // and then returns tmp.
   119  func (o *orderState) cheapExpr(n ir.Node) ir.Node {
   120  	if n == nil {
   121  		return nil
   122  	}
   123  
   124  	switch n.Op() {
   125  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   126  		return n
   127  	case ir.OLEN, ir.OCAP:
   128  		n := n.(*ir.UnaryExpr)
   129  		l := o.cheapExpr(n.X)
   130  		if l == n.X {
   131  			return n
   132  		}
   133  		a := ir.Copy(n).(*ir.UnaryExpr)
   134  		a.X = l
   135  		return typecheck.Expr(a)
   136  	}
   137  
   138  	return o.copyExpr(n)
   139  }
   140  
// safeExpr returns a safe version of n.
// The definition of safe is that n can appear multiple times
// without violating the semantics of the original program,
// and that assigning to the safe version has the same effect
// as assigning to the original n.
//
// The intended use is to apply to x when rewriting x += y into x = x + y.
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		// Names and constants may be repeated freely.
		return n

	case ir.OLEN, ir.OCAP:
		// len/cap: the operand itself must be safe to re-evaluate.
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		// x.f on a value: assigning writes through x, so x itself must
		// stay assignable — recurse with safeExpr, not cheapExpr.
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		// p.f through a pointer: only the pointer value needs to be
		// stable, so cheapExpr (copy into a temp) suffices.
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		// *p: as with ODOTPTR, stabilize the pointer value only.
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			// Indexing an array value: writes go into the array itself,
			// so the array must stay assignable (safeExpr).
			l = o.safeExpr(n.X)
		} else {
			// Slice/map/pointer-to-array: stabilizing the header/pointer
			// value is enough (cheapExpr).
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil // not reached
	}
}
   215  
// addrTemp ensures that n is okay to pass by address to runtime routines.
// If the original argument n is not okay, addrTemp creates a tmp, emits
// tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g.
//
//	n.Left = o.addrTemp(n.Left)
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	// Note: Avoid addrTemp with static assignment for literal strings
	// when compiling FIPS packages.
	// The problem is that panic("foo") ends up creating a static RODATA temp
	// for the implicit conversion of "foo" to any, and we can't handle
	// the relocations in that temp.
	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
		// Constants: place the value in read-only static data rather than
		// copying it into a stack temporary on every execution.
		// TODO: expand this to all static composite literal nodes?
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			// A constant must be fully initializable at link time;
			// any generated runtime code indicates a bug.
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// Prevent taking the address of an SSA-able local variable (#63332).
	//
	// TODO(mdempsky): Note that OuterValue unwraps OCONVNOPs, but
	// IsAddressable does not. It should be possible to skip copying for
	// at least some of these OCONVNOPs (e.g., reinsert them after the
	// OADDR operation), but at least walkCompare needs to be fixed to
	// support that (see trybot failures on go.dev/cl/541715, PS1).
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				// Copy instead of addressing the variable directly, so the
				// variable can still be kept in SSA form (e.g., registers).
				goto Copy
			}
		}

		// Addressable and not SSA-sensitive: safe to pass by address as-is.
		return n
	}

Copy:
	return o.copyExpr(n)
}
   262  
// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// The first parameter is the position of n's containing node, for use in case
// that n's position is not unique (e.g., if n is an ONAME).
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: map*_fast* calls. See golang.org/issue/19015.
	alg := mapfast(t)
	if alg == mapslow {
		// Slow path passes the key by reference; make it addressable.
		return o.addrTemp(n)
	}
	// Fast variants take the key by value, with a fixed runtime type kt.
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		// Key already has exactly the runtime's expected type.
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named type to underlying type, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory.
		// We'll need to do a load with type kt. Create a temporary of type kt to
		// ensure sufficient alignment. nt may be under-aligned.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}
   320  
   321  // mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
   322  // in n to avoid string allocations for keys in map lookups.
   323  // Returns a bool that signals if a modification was made.
   324  //
   325  // For:
   326  //
   327  //	x = m[string(k)]
   328  //	x = m[T1{... Tn{..., string(k), ...}}]
   329  //
   330  // where k is []byte, T1 to Tn is a nesting of struct and array literals,
   331  // the allocation of backing bytes for the string can be avoided
   332  // by reusing the []byte backing array. These are special cases
   333  // for avoiding allocations when converting byte slices to strings.
   334  // It would be nice to handle these generally, but because
   335  // []byte keys are not allowed in maps, the use of string(k)
   336  // comes up in important cases in practice. See issue 3512.
   337  func mapKeyReplaceStrConv(n ir.Node) bool {
   338  	var replaced bool
   339  	switch n.Op() {
   340  	case ir.OBYTES2STR:
   341  		n := n.(*ir.ConvExpr)
   342  		n.SetOp(ir.OBYTES2STRTMP)
   343  		replaced = true
   344  	case ir.OSTRUCTLIT:
   345  		n := n.(*ir.CompLitExpr)
   346  		for _, elem := range n.List {
   347  			elem := elem.(*ir.StructKeyExpr)
   348  			if mapKeyReplaceStrConv(elem.Value) {
   349  				replaced = true
   350  			}
   351  		}
   352  	case ir.OARRAYLIT:
   353  		n := n.(*ir.CompLitExpr)
   354  		for _, elem := range n.List {
   355  			if elem.Op() == ir.OKEY {
   356  				elem = elem.(*ir.KeyExpr).Value
   357  			}
   358  			if mapKeyReplaceStrConv(elem) {
   359  				replaced = true
   360  			}
   361  		}
   362  	}
   363  	return replaced
   364  }
   365  
   366  type ordermarker int
   367  
// markTemp returns the top of the temporary variable stack.
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}
   372  
   373  // popTemp pops temporaries off the stack until reaching the mark,
   374  // which must have been returned by markTemp.
   375  func (o *orderState) popTemp(mark ordermarker) {
   376  	for _, n := range o.temp[mark:] {
   377  		key := n.Type().LinkString()
   378  		o.free[key] = append(o.free[key], n)
   379  	}
   380  	o.temp = o.temp[:mark]
   381  }
   382  
   383  // stmtList orders each of the statements in the list.
   384  func (o *orderState) stmtList(l ir.Nodes) {
   385  	s := l
   386  	for i := range s {
   387  		orderMakeSliceCopy(s[i:])
   388  		o.stmt(s[i])
   389  	}
   390  }
   391  
// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
	// Skip the fusion when optimizations are off or when instrumenting
	// (the separate copy call must remain visible to instrumentation).
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The line above this one is correct with the differing equality operators:
		// we want as.X and cp.X to be the same name,
		// but we want the initial data to be coming from a different name.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	// Only fuse heap-allocated make([]T, len) with no explicit capacity.
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove separate copy call
}
   429  
// edge inserts coverage instrumentation for libfuzzer.
// It emits a per-edge 8-bit counter increment; a no-op unless
// -d=libfuzzer is enabled.
func (o *orderState) edge() {
	if base.Debug.Libfuzzer == 0 {
		return
	}

	// Create a new uint8 counter to be allocated in section __sancov_cntrs
	counter := staticinit.StaticName(types.Types[types.TUINT8])
	counter.SetLibfuzzer8BitCounter(true)
	// As well as setting SetLibfuzzer8BitCounter, we preemptively set the
	// symbol type to SLIBFUZZER_8BIT_COUNTER so that the race detector
	// instrumentation pass (which does not have access to the flags set by
	// SetLibfuzzer8BitCounter) knows to ignore them. This information is
	// lost by the time it reaches the compile step, so SetLibfuzzer8BitCounter
	// is still necessary.
	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER

	// We guarantee that the counter never becomes zero again once it has been
	// incremented once. This implementation follows the NeverZero optimization
	// presented by the paper:
	// "AFL++: Combining Incremental Steps of Fuzzing Research"
	// The NeverZero policy avoids the overflow to 0 by setting the counter to one
	// after it reaches 255 and so, if an edge is executed at least one time, the entry is
	// never 0.
	// Another policy presented in the paper is the Saturated Counters policy which
	// freezes the counter when it reaches the value of 255. However, a range
	// of experiments showed that that decreases overall performance.
	//
	// Emits: if counter == 0xff { counter = 1 } else { counter += 1 }
	o.append(ir.NewIfStmt(base.Pos,
		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
}
   462  
   463  // orderBlock orders the block of statements in n into a new slice,
   464  // and then replaces the old slice in n with the new slice.
   465  // free is a map that can be used to obtain temporary variables by type.
   466  func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
   467  	if len(*n) != 0 {
   468  		// Set reasonable position for instrumenting code. See issue 53688.
   469  		// It would be nice if ir.Nodes had a position (the opening {, probably),
   470  		// but it doesn't. So we use the first statement's position instead.
   471  		ir.SetPos((*n)[0])
   472  	}
   473  	var order orderState
   474  	order.free = free
   475  	mark := order.markTemp()
   476  	order.edge()
   477  	order.stmtList(*n)
   478  	order.popTemp(mark)
   479  	*n = order.out
   480  }
   481  
   482  // exprInPlace orders the side effects in *np and
   483  // leaves them as the init list of the final *np.
   484  // The result of exprInPlace MUST be assigned back to n, e.g.
   485  //
   486  //	n.Left = o.exprInPlace(n.Left)
   487  func (o *orderState) exprInPlace(n ir.Node) ir.Node {
   488  	var order orderState
   489  	order.free = o.free
   490  	n = order.expr(n, nil)
   491  	n = ir.InitExpr(order.out, n)
   492  
   493  	// insert new temporaries from order
   494  	// at head of outer list.
   495  	o.temp = append(o.temp, order.temp...)
   496  	return n
   497  }
   498  
   499  // orderStmtInPlace orders the side effects of the single statement *np
   500  // and replaces it with the resulting statement list.
   501  // The result of orderStmtInPlace MUST be assigned back to n, e.g.
   502  //
   503  //	n.Left = orderStmtInPlace(n.Left)
   504  //
   505  // free is a map that can be used to obtain temporary variables by type.
   506  func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
   507  	var order orderState
   508  	order.free = free
   509  	mark := order.markTemp()
   510  	order.stmt(n)
   511  	order.popTemp(mark)
   512  	return ir.NewBlockStmt(src.NoXPos, order.out)
   513  }
   514  
   515  // init moves n's init list to o.out.
   516  func (o *orderState) init(n ir.Node) {
   517  	if ir.MayBeShared(n) {
   518  		// For concurrency safety, don't mutate potentially shared nodes.
   519  		// First, ensure that no work is required here.
   520  		if len(n.Init()) > 0 {
   521  			base.Fatalf("order.init shared node with ninit")
   522  		}
   523  		return
   524  	}
   525  	o.stmtList(ir.TakeInit(n))
   526  }
   527  
// call orders the call expression n.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		// Method calls are rewritten to OCALLFUNC during typecheck.
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		// Builtins have varying node shapes; order their operands
		// according to the concrete expression type.
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
		// do not introduce temporaries here, so it is easier to rewrite it
		// to symbol address reference later in walk.
		return
	}

	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}
   573  
   574  // mapAssign appends n to o.out.
   575  func (o *orderState) mapAssign(n ir.Node) {
   576  	switch n.Op() {
   577  	default:
   578  		base.Fatalf("order.mapAssign %v", n.Op())
   579  
   580  	case ir.OAS:
   581  		n := n.(*ir.AssignStmt)
   582  		if n.X.Op() == ir.OINDEXMAP {
   583  			n.Y = o.safeMapRHS(n.Y)
   584  		}
   585  		o.out = append(o.out, n)
   586  	case ir.OASOP:
   587  		n := n.(*ir.AssignOpStmt)
   588  		if n.X.Op() == ir.OINDEXMAP {
   589  			n.Y = o.safeMapRHS(n.Y)
   590  		}
   591  		o.out = append(o.out, n)
   592  	}
   593  }
   594  
   595  func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
   596  	// Make sure we evaluate the RHS before starting the map insert.
   597  	// We need to make sure the RHS won't panic.  See issue 22881.
   598  	if r.Op() == ir.OAPPEND {
   599  		r := r.(*ir.CallExpr)
   600  		s := r.Args[1:]
   601  		for i, n := range s {
   602  			s[i] = o.cheapExpr(n)
   603  		}
   604  		return r
   605  	}
   606  	return o.cheapExpr(r)
   607  }
   608  
   609  // stmt orders the statement n, appending to o.out.
   610  func (o *orderState) stmt(n ir.Node) {
   611  	if n == nil {
   612  		return
   613  	}
   614  
   615  	lno := ir.SetPos(n)
   616  	o.init(n)
   617  
   618  	switch n.Op() {
   619  	default:
   620  		base.Fatalf("order.stmt %v", n.Op())
   621  
   622  	case ir.OINLMARK:
   623  		o.out = append(o.out, n)
   624  
   625  	case ir.OAS:
   626  		n := n.(*ir.AssignStmt)
   627  		t := o.markTemp()
   628  
   629  		// There's a delicate interaction here between two OINDEXMAP
   630  		// optimizations.
   631  		//
   632  		// First, we want to handle m[k] = append(m[k], ...) with a single
   633  		// runtime call to mapassign. This requires the m[k] expressions to
   634  		// satisfy ir.SameSafeExpr in walkAssign.
   635  		//
   636  		// But if k is a slow map key type that's passed by reference (e.g.,
   637  		// byte), then we want to avoid marking user variables as addrtaken,
   638  		// if that might prevent the compiler from keeping k in a register.
   639  		//
   640  		// TODO(mdempsky): It would be better if walk was responsible for
   641  		// inserting temporaries as needed.
   642  		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
   643  			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])
   644  
   645  		n.X = o.expr(n.X, nil)
   646  		if mapAppend {
   647  			indexLHS := n.X.(*ir.IndexExpr)
   648  			indexLHS.X = o.cheapExpr(indexLHS.X)
   649  			indexLHS.Index = o.cheapExpr(indexLHS.Index)
   650  
   651  			call := n.Y.(*ir.CallExpr)
   652  			arg0 := call.Args[0]
   653  			// ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096).
   654  			for arg0.Op() == ir.OCONVNOP {
   655  				arg0 = arg0.(*ir.ConvExpr).X
   656  			}
   657  			indexRHS := arg0.(*ir.IndexExpr)
   658  			indexRHS.X = indexLHS.X
   659  			indexRHS.Index = indexLHS.Index
   660  
   661  			o.exprList(call.Args[1:])
   662  		} else {
   663  			n.Y = o.expr(n.Y, n.X)
   664  		}
   665  		o.mapAssign(n)
   666  		o.popTemp(t)
   667  
   668  	case ir.OASOP:
   669  		n := n.(*ir.AssignOpStmt)
   670  		t := o.markTemp()
   671  		n.X = o.expr(n.X, nil)
   672  		n.Y = o.expr(n.Y, nil)
   673  
   674  		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
   675  			// Rewrite m[k] op= r into m[k] = m[k] op r so
   676  			// that we can ensure that if op panics
   677  			// because r is zero, the panic happens before
   678  			// the map assignment.
   679  			// DeepCopy is a big hammer here, but safeExpr
   680  			// makes sure there is nothing too deep being copied.
   681  			l1 := o.safeExpr(n.X)
   682  			l2 := ir.DeepCopy(src.NoXPos, l1)
   683  			if l2.Op() == ir.OINDEXMAP {
   684  				l2 := l2.(*ir.IndexExpr)
   685  				l2.Assigned = false
   686  			}
   687  			l2 = o.copyExpr(l2)
   688  			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
   689  			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
   690  			o.mapAssign(as)
   691  			o.popTemp(t)
   692  			return
   693  		}
   694  
   695  		o.mapAssign(n)
   696  		o.popTemp(t)
   697  
   698  	case ir.OAS2:
   699  		n := n.(*ir.AssignListStmt)
   700  		t := o.markTemp()
   701  		o.exprList(n.Lhs)
   702  		o.exprList(n.Rhs)
   703  		o.out = append(o.out, n)
   704  		o.popTemp(t)
   705  
   706  	// Special: avoid copy of func call n.Right
   707  	case ir.OAS2FUNC:
   708  		n := n.(*ir.AssignListStmt)
   709  		t := o.markTemp()
   710  		o.exprList(n.Lhs)
   711  		call := n.Rhs[0]
   712  		o.init(call)
   713  		if ic, ok := call.(*ir.InlinedCallExpr); ok {
   714  			o.stmtList(ic.Body)
   715  
   716  			n.SetOp(ir.OAS2)
   717  			n.Rhs = ic.ReturnVars
   718  
   719  			o.exprList(n.Rhs)
   720  			o.out = append(o.out, n)
   721  		} else {
   722  			o.call(call)
   723  			o.as2func(n)
   724  		}
   725  		o.popTemp(t)
   726  
   727  	// Special: use temporary variables to hold result,
   728  	// so that runtime can take address of temporary.
   729  	// No temporary for blank assignment.
   730  	//
   731  	// OAS2MAPR: make sure key is addressable if needed,
   732  	//           and make sure OINDEXMAP is not copied out.
   733  	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
   734  		n := n.(*ir.AssignListStmt)
   735  		t := o.markTemp()
   736  		o.exprList(n.Lhs)
   737  
   738  		switch r := n.Rhs[0]; r.Op() {
   739  		case ir.ODOTTYPE2:
   740  			r := r.(*ir.TypeAssertExpr)
   741  			r.X = o.expr(r.X, nil)
   742  		case ir.ODYNAMICDOTTYPE2:
   743  			r := r.(*ir.DynamicTypeAssertExpr)
   744  			r.X = o.expr(r.X, nil)
   745  			r.RType = o.expr(r.RType, nil)
   746  			r.ITab = o.expr(r.ITab, nil)
   747  		case ir.ORECV:
   748  			r := r.(*ir.UnaryExpr)
   749  			r.X = o.expr(r.X, nil)
   750  		case ir.OINDEXMAP:
   751  			r := r.(*ir.IndexExpr)
   752  			r.X = o.expr(r.X, nil)
   753  			r.Index = o.expr(r.Index, nil)
   754  			// See similar conversion for OINDEXMAP below.
   755  			_ = mapKeyReplaceStrConv(r.Index)
   756  			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
   757  		default:
   758  			base.Fatalf("order.stmt: %v", r.Op())
   759  		}
   760  
   761  		o.as2ok(n)
   762  		o.popTemp(t)
   763  
   764  	// Special: does not save n onto out.
   765  	case ir.OBLOCK:
   766  		n := n.(*ir.BlockStmt)
   767  		o.stmtList(n.List)
   768  
   769  	// Special: n->left is not an expression; save as is.
   770  	case ir.OBREAK,
   771  		ir.OCONTINUE,
   772  		ir.ODCL,
   773  		ir.OFALL,
   774  		ir.OGOTO,
   775  		ir.OLABEL,
   776  		ir.OTAILCALL:
   777  		o.out = append(o.out, n)
   778  
   779  	// Special: handle call arguments.
   780  	case ir.OCALLFUNC, ir.OCALLINTER:
   781  		n := n.(*ir.CallExpr)
   782  		t := o.markTemp()
   783  		o.call(n)
   784  		o.out = append(o.out, n)
   785  		o.popTemp(t)
   786  
   787  	case ir.OINLCALL:
   788  		n := n.(*ir.InlinedCallExpr)
   789  		o.stmtList(n.Body)
   790  
   791  		// discard results; double-check for no side effects
   792  		for _, result := range n.ReturnVars {
   793  			if staticinit.AnySideEffects(result) {
   794  				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
   795  			}
   796  		}
   797  
   798  	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
   799  		n := n.(*ir.UnaryExpr)
   800  		t := o.markTemp()
   801  		n.X = o.expr(n.X, nil)
   802  		o.out = append(o.out, n)
   803  		o.popTemp(t)
   804  
   805  	case ir.OCOPY:
   806  		n := n.(*ir.BinaryExpr)
   807  		t := o.markTemp()
   808  		n.X = o.expr(n.X, nil)
   809  		n.Y = o.expr(n.Y, nil)
   810  		o.out = append(o.out, n)
   811  		o.popTemp(t)
   812  
   813  	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
   814  		n := n.(*ir.CallExpr)
   815  		t := o.markTemp()
   816  		o.call(n)
   817  		o.out = append(o.out, n)
   818  		o.popTemp(t)
   819  
   820  	// Special: order arguments to inner call but not call itself.
   821  	case ir.ODEFER, ir.OGO:
   822  		n := n.(*ir.GoDeferStmt)
   823  		t := o.markTemp()
   824  		o.init(n.Call)
   825  		o.call(n.Call)
   826  		o.out = append(o.out, n)
   827  		o.popTemp(t)
   828  
   829  	case ir.ODELETE:
   830  		n := n.(*ir.CallExpr)
   831  		t := o.markTemp()
   832  		n.Args[0] = o.expr(n.Args[0], nil)
   833  		n.Args[1] = o.expr(n.Args[1], nil)
   834  		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
   835  		o.out = append(o.out, n)
   836  		o.popTemp(t)
   837  
   838  	// Clean temporaries from condition evaluation at
   839  	// beginning of loop body and after for statement.
   840  	case ir.OFOR:
   841  		n := n.(*ir.ForStmt)
   842  		t := o.markTemp()
   843  		n.Cond = o.exprInPlace(n.Cond)
   844  		orderBlock(&n.Body, o.free)
   845  		n.Post = orderStmtInPlace(n.Post, o.free)
   846  		o.out = append(o.out, n)
   847  		o.popTemp(t)
   848  
   849  	// Clean temporaries from condition at
   850  	// beginning of both branches.
   851  	case ir.OIF:
   852  		n := n.(*ir.IfStmt)
   853  		t := o.markTemp()
   854  		n.Cond = o.exprInPlace(n.Cond)
   855  		o.popTemp(t)
   856  		orderBlock(&n.Body, o.free)
   857  		orderBlock(&n.Else, o.free)
   858  		o.out = append(o.out, n)
   859  
   860  	case ir.ORANGE:
   861  		// n.Right is the expression being ranged over.
   862  		// order it, and then make a copy if we need one.
   863  		// We almost always do, to ensure that we don't
   864  		// see any value changes made during the loop.
   865  		// Usually the copy is cheap (e.g., array pointer,
   866  		// chan, slice, string are all tiny).
   867  		// The exception is ranging over an array value
   868  		// (not a slice, not a pointer to array),
   869  		// which must make a copy to avoid seeing updates made during
   870  		// the range body. Ranging over an array value is uncommon though.
   871  
   872  		// Mark []byte(str) range expression to reuse string backing storage.
   873  		// It is safe because the storage cannot be mutated.
   874  		n := n.(*ir.RangeStmt)
   875  		if x, ok := n.X.(*ir.ConvExpr); ok {
   876  			switch x.Op() {
   877  			case ir.OSTR2BYTES:
   878  				x.SetOp(ir.OSTR2BYTESTMP)
   879  				fallthrough
   880  			case ir.OSTR2BYTESTMP:
   881  				x.MarkNonNil() // "range []byte(nil)" is fine
   882  			}
   883  		}
   884  
   885  		t := o.markTemp()
   886  		n.X = o.expr(n.X, nil)
   887  
   888  		orderBody := true
   889  		xt := typecheck.RangeExprType(n.X.Type())
   890  		switch k := xt.Kind(); {
   891  		default:
   892  			base.Fatalf("order.stmt range %v", n.Type())
   893  
   894  		case types.IsInt[k]:
   895  			// Used only once, no need to copy.
   896  
   897  		case k == types.TARRAY, k == types.TSLICE:
   898  			if n.Value == nil || ir.IsBlank(n.Value) {
   899  				// for i := range x will only use x once, to compute len(x).
   900  				// No need to copy it.
   901  				break
   902  			}
   903  			fallthrough
   904  
   905  		case k == types.TCHAN, k == types.TSTRING:
   906  			// chan, string, slice, array ranges use value multiple times.
   907  			// make copy.
   908  			r := n.X
   909  
   910  			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
   911  				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
   912  				r.SetType(types.Types[types.TSTRING])
   913  				r = typecheck.Expr(r)
   914  			}
   915  
   916  			n.X = o.copyExpr(r)
   917  
   918  		case k == types.TMAP:
   919  			if isMapClear(n) {
   920  				// Preserve the body of the map clear pattern so it can
   921  				// be detected during walk. The loop body will not be used
   922  				// when optimizing away the range loop to a runtime call.
   923  				orderBody = false
   924  				break
   925  			}
   926  
   927  			// copy the map value in case it is a map literal.
   928  			// TODO(rsc): Make tmp = literal expressions reuse tmp.
   929  			// For maps tmp is just one word so it hardly matters.
   930  			r := n.X
   931  			n.X = o.copyExpr(r)
   932  
   933  			// n.Prealloc is the temp for the iterator.
   934  			// MapIterType contains pointers and needs to be zeroed.
   935  			if buildcfg.Experiment.SwissMap {
   936  				n.Prealloc = o.newTemp(reflectdata.SwissMapIterType(), true)
   937  			} else {
   938  				n.Prealloc = o.newTemp(reflectdata.OldMapIterType(), true)
   939  			}
   940  		}
   941  		n.Key = o.exprInPlace(n.Key)
   942  		n.Value = o.exprInPlace(n.Value)
   943  		if orderBody {
   944  			orderBlock(&n.Body, o.free)
   945  		}
   946  		o.out = append(o.out, n)
   947  		o.popTemp(t)
   948  
   949  	case ir.ORETURN:
   950  		n := n.(*ir.ReturnStmt)
   951  		o.exprList(n.Results)
   952  		o.out = append(o.out, n)
   953  
   954  	// Special: clean case temporaries in each block entry.
   955  	// Select must enter one of its blocks, so there is no
   956  	// need for a cleaning at the end.
   957  	// Doubly special: evaluation order for select is stricter
   958  	// than ordinary expressions. Even something like p.c
   959  	// has to be hoisted into a temporary, so that it cannot be
   960  	// reordered after the channel evaluation for a different
   961  	// case (if p were nil, then the timing of the fault would
   962  	// give this away).
   963  	case ir.OSELECT:
   964  		n := n.(*ir.SelectStmt)
   965  		t := o.markTemp()
   966  		for _, ncas := range n.Cases {
   967  			r := ncas.Comm
   968  			ir.SetPos(ncas)
   969  
   970  			// Append any new body prologue to ninit.
   971  			// The next loop will insert ninit into nbody.
   972  			if len(ncas.Init()) != 0 {
   973  				base.Fatalf("order select ninit")
   974  			}
   975  			if r == nil {
   976  				continue
   977  			}
   978  			switch r.Op() {
   979  			default:
   980  				ir.Dump("select case", r)
   981  				base.Fatalf("unknown op in select %v", r.Op())
   982  
   983  			case ir.OSELRECV2:
   984  				// case x, ok = <-c
   985  				r := r.(*ir.AssignListStmt)
   986  				recv := r.Rhs[0].(*ir.UnaryExpr)
   987  				recv.X = o.expr(recv.X, nil)
   988  				if !ir.IsAutoTmp(recv.X) {
   989  					recv.X = o.copyExpr(recv.X)
   990  				}
   991  				init := ir.TakeInit(r)
   992  
   993  				colas := r.Def
   994  				do := func(i int, t *types.Type) {
   995  					n := r.Lhs[i]
   996  					if ir.IsBlank(n) {
   997  						return
   998  					}
   999  					// If this is case x := <-ch or case x, y := <-ch, the case has
  1000  					// the ODCL nodes to declare x and y. We want to delay that
  1001  					// declaration (and possible allocation) until inside the case body.
  1002  					// Delete the ODCL nodes here and recreate them inside the body below.
  1003  					if colas {
  1004  						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
  1005  							init = init[1:]
  1006  
  1007  							// iimport may have added a default initialization assignment,
  1008  							// due to how it handles ODCL statements.
  1009  							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
  1010  								init = init[1:]
  1011  							}
  1012  						}
  1013  						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1014  						ncas.PtrInit().Append(dcl)
  1015  					}
  1016  					tmp := o.newTemp(t, t.HasPointers())
  1017  					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
  1018  					ncas.PtrInit().Append(as)
  1019  					r.Lhs[i] = tmp
  1020  				}
  1021  				do(0, recv.X.Type().Elem())
  1022  				do(1, types.Types[types.TBOOL])
  1023  				if len(init) != 0 {
  1024  					ir.DumpList("ninit", init)
  1025  					base.Fatalf("ninit on select recv")
  1026  				}
  1027  				orderBlock(ncas.PtrInit(), o.free)
  1028  
  1029  			case ir.OSEND:
  1030  				r := r.(*ir.SendStmt)
  1031  				if len(r.Init()) != 0 {
  1032  					ir.DumpList("ninit", r.Init())
  1033  					base.Fatalf("ninit on select send")
  1034  				}
  1035  
  1036  				// case c <- x
  1037  				// r->left is c, r->right is x, both are always evaluated.
  1038  				r.Chan = o.expr(r.Chan, nil)
  1039  
  1040  				if !ir.IsAutoTmp(r.Chan) {
  1041  					r.Chan = o.copyExpr(r.Chan)
  1042  				}
  1043  				r.Value = o.expr(r.Value, nil)
  1044  				if !ir.IsAutoTmp(r.Value) {
  1045  					r.Value = o.copyExpr(r.Value)
  1046  				}
  1047  			}
  1048  		}
  1049  		// Now that we have accumulated all the temporaries, clean them.
  1050  		// Also insert any ninit queued during the previous loop.
  1051  		// (The temporary cleaning must follow that ninit work.)
  1052  		for _, cas := range n.Cases {
  1053  			orderBlock(&cas.Body, o.free)
  1054  
  1055  			// TODO(mdempsky): Is this actually necessary?
  1056  			// walkSelect appears to walk Ninit.
  1057  			cas.Body.Prepend(ir.TakeInit(cas)...)
  1058  		}
  1059  
  1060  		o.out = append(o.out, n)
  1061  		o.popTemp(t)
  1062  
  1063  	// Special: value being sent is passed as a pointer; make it addressable.
  1064  	case ir.OSEND:
  1065  		n := n.(*ir.SendStmt)
  1066  		t := o.markTemp()
  1067  		n.Chan = o.expr(n.Chan, nil)
  1068  		n.Value = o.expr(n.Value, nil)
  1069  		if base.Flag.Cfg.Instrumenting {
  1070  			// Force copying to the stack so that (chan T)(nil) <- x
  1071  			// is still instrumented as a read of x.
  1072  			n.Value = o.copyExpr(n.Value)
  1073  		} else {
  1074  			n.Value = o.addrTemp(n.Value)
  1075  		}
  1076  		o.out = append(o.out, n)
  1077  		o.popTemp(t)
  1078  
  1079  	// TODO(rsc): Clean temporaries more aggressively.
  1080  	// Note that because walkSwitch will rewrite some of the
  1081  	// switch into a binary search, this is not as easy as it looks.
  1082  	// (If we ran that code here we could invoke order.stmt on
  1083  	// the if-else chain instead.)
  1084  	// For now just clean all the temporaries at the end.
  1085  	// In practice that's fine.
  1086  	case ir.OSWITCH:
  1087  		n := n.(*ir.SwitchStmt)
  1088  		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
  1089  			// Add empty "default:" case for instrumentation.
  1090  			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
  1091  		}
  1092  
  1093  		t := o.markTemp()
  1094  		n.Tag = o.expr(n.Tag, nil)
  1095  		for _, ncas := range n.Cases {
  1096  			o.exprListInPlace(ncas.List)
  1097  			orderBlock(&ncas.Body, o.free)
  1098  		}
  1099  
  1100  		o.out = append(o.out, n)
  1101  		o.popTemp(t)
  1102  	}
  1103  
  1104  	base.Pos = lno
  1105  }
  1106  
  1107  func hasDefaultCase(n *ir.SwitchStmt) bool {
  1108  	for _, ncas := range n.Cases {
  1109  		if len(ncas.List) == 0 {
  1110  			return true
  1111  		}
  1112  	}
  1113  	return false
  1114  }
  1115  
  1116  // exprList orders the expression list l into o.
  1117  func (o *orderState) exprList(l ir.Nodes) {
  1118  	s := l
  1119  	for i := range s {
  1120  		s[i] = o.expr(s[i], nil)
  1121  	}
  1122  }
  1123  
  1124  // exprListInPlace orders the expression list l but saves
  1125  // the side effects on the individual expression ninit lists.
  1126  func (o *orderState) exprListInPlace(l ir.Nodes) {
  1127  	s := l
  1128  	for i := range s {
  1129  		s[i] = o.exprInPlace(s[i])
  1130  	}
  1131  }
  1132  
// exprNoLHS is o.expr with no assignment destination. It exists so
// that a single method-value closure (cached in o.edit) can be reused
// by expr1's default case when editing children.
func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}
  1136  
  1137  // expr orders a single expression, appending side
  1138  // effects to o.out as needed.
  1139  // If this is part of an assignment lhs = *np, lhs is given.
  1140  // Otherwise lhs == nil. (When lhs != nil it may be possible
  1141  // to avoid copying the result of the expression to a temporary.)
  1142  // The result of expr MUST be assigned back to n, e.g.
  1143  //
  1144  //	n.Left = o.expr(n.Left, lhs)
  1145  func (o *orderState) expr(n, lhs ir.Node) ir.Node {
  1146  	if n == nil {
  1147  		return n
  1148  	}
  1149  	lno := ir.SetPos(n)
  1150  	n = o.expr1(n, lhs)
  1151  	base.Pos = lno
  1152  	return n
  1153  }
  1154  
// expr1 implements expr: it dispatches on n.Op() to apply the
// per-operation ordering rules. lhs, if non-nil, is the destination of
// an enclosing assignment; several cases consult it to decide whether
// the result must be copied to a temporary. Like expr, the result MUST
// be assigned back in place of n by the caller (some cases return a
// different node, e.g. a temporary).
func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Fewer than 5 strings use direct runtime helpers.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse byteSlice backing
		// buffer during conversion. String concatenation does not
		// memorize the strings for later use, so it is safe.
		// However, we can do it only if there is at least one non-empty string literal.
		// Otherwise if all other arguments are empty strings,
		// concatstrings will return the reference to the temp string
		// to the caller.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// key may need to be addressable
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// We also process static composite literal node here, making a named static global
			// whose address we can put directly in an interface (see OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together. See golang.org/issue/15329.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r

		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...) .
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil) // order x
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil) // order y
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// buffer during conversion. String comparison does not
			// memorize the strings for later use, so it is safe.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// for complex comparisons, we need both args to be
			// addressable so we can pass them to the runtime.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//   map[int]int{
		//     a(): b(),
		//     c(): d(),
		//     e(): f(),
		//   }
		// to
		//   m := map[int]int{}
		//   m[a()] = b()
		//   m[c()] = d()
		//   m[e()] = f()
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		// See issue 26552.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them to dynamic;
			// e.g., OCONVIFACE nodes. See #31777.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include that count
		// in the map alloc hint.
		// We're assuming here that all the keys in the map literal are distinct.
		// If any are equal, this will be an overcount. Probably not worth accounting
		// for that, as equal keys in map literals are rare, and at worst we waste
		// a bit of space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}
  1510  
  1511  // as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
  1512  // The caller should order the right-hand side of the assignment before calling order.as2func.
  1513  // It rewrites,
  1514  //
  1515  //	a, b, a = ...
  1516  //
  1517  // as
  1518  //
  1519  //	tmp1, tmp2, tmp3 = ...
  1520  //	a, b, a = tmp1, tmp2, tmp3
  1521  //
  1522  // This is necessary to ensure left to right assignment order.
  1523  func (o *orderState) as2func(n *ir.AssignListStmt) {
  1524  	results := n.Rhs[0].Type()
  1525  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1526  	for i, nl := range n.Lhs {
  1527  		if !ir.IsBlank(nl) {
  1528  			typ := results.Field(i).Type
  1529  			tmp := o.newTemp(typ, typ.HasPointers())
  1530  			n.Lhs[i] = tmp
  1531  			as.Lhs = append(as.Lhs, nl)
  1532  			as.Rhs = append(as.Rhs, tmp)
  1533  		}
  1534  	}
  1535  
  1536  	o.out = append(o.out, n)
  1537  	o.stmt(typecheck.Stmt(as))
  1538  }
  1539  
  1540  // as2ok orders OAS2XXX with ok.
  1541  // Just like as2func, this also adds temporaries to ensure left-to-right assignment.
  1542  func (o *orderState) as2ok(n *ir.AssignListStmt) {
  1543  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1544  
  1545  	do := func(i int, typ *types.Type) {
  1546  		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
  1547  			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
  1548  			n.Lhs[i] = tmp
  1549  			as.Lhs = append(as.Lhs, nl)
  1550  			if i == 1 {
  1551  				// The "ok" result is an untyped boolean according to the Go
  1552  				// spec. We need to explicitly convert it to the LHS type in
  1553  				// case the latter is a defined boolean type (#8475).
  1554  				tmp = typecheck.Conv(tmp, nl.Type())
  1555  			}
  1556  			as.Rhs = append(as.Rhs, tmp)
  1557  		}
  1558  	}
  1559  
  1560  	do(0, n.Rhs[0].Type())
  1561  	do(1, types.Types[types.TBOOL])
  1562  
  1563  	o.out = append(o.out, n)
  1564  	o.stmt(typecheck.Stmt(as))
  1565  }
  1566  

View as plain text