Source file src/cmd/compile/internal/walk/order.go

     1  // Copyright 2012 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/abi"
    11  	"internal/buildcfg"
    12  
    13  	"cmd/compile/internal/base"
    14  	"cmd/compile/internal/ir"
    15  	"cmd/compile/internal/reflectdata"
    16  	"cmd/compile/internal/ssa"
    17  	"cmd/compile/internal/staticinit"
    18  	"cmd/compile/internal/typecheck"
    19  	"cmd/compile/internal/types"
    20  	"cmd/internal/objabi"
    21  	"cmd/internal/src"
    22  )
    23  
    24  // Rewrite tree to use separate statements to enforce
    25  // order of evaluation. Makes walk easier, because it
    26  // can (after this runs) reorder at will within an expression.
    27  //
    28  // Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
    29  //
    30  // Introduce temporaries as needed by runtime routines.
    31  // For example, the map runtime routines take the map key
    32  // by reference, so make sure all map keys are addressable
    33  // by copying them to temporaries as needed.
    34  // The same is true for channel operations.
    35  //
    36  // Arrange that map index expressions only appear in direct
    37  // assignments x = m[k] or m[k] = x, never in larger expressions.
    38  //
    39  // Arrange that receive expressions only appear in direct assignments
    40  // x = <-c or as standalone statements <-c, never in larger expressions.
    41  
// orderState holds state during the ordering process.
type orderState struct {
	out  []ir.Node             // list of generated statements, appended to by append/stmt
	temp []*ir.Name            // stack of temporary variables; popTemp releases a suffix back to free
	free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS, to avoid reallocating it per call
}
    49  
    50  // order rewrites fn.Nbody to apply the ordering constraints
    51  // described in the comment at the top of the file.
    52  func order(fn *ir.Func) {
    53  	if base.Flag.W > 1 {
    54  		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
    55  		ir.DumpList(s, fn.Body)
    56  	}
    57  	ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.
    58  	orderBlock(&fn.Body, map[string][]*ir.Name{})
    59  }
    60  
    61  // append typechecks stmt and appends it to out.
    62  func (o *orderState) append(stmt ir.Node) {
    63  	o.out = append(o.out, typecheck.Stmt(stmt))
    64  }
    65  
    66  // newTemp allocates a new temporary with the given type,
    67  // pushes it onto the temp stack, and returns it.
    68  // If clear is true, newTemp emits code to zero the temporary.
    69  func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
    70  	var v *ir.Name
    71  	key := t.LinkString()
    72  	if a := o.free[key]; len(a) > 0 {
    73  		v = a[len(a)-1]
    74  		if !types.Identical(t, v.Type()) {
    75  			base.Fatalf("expected %L to have type %v", v, t)
    76  		}
    77  		o.free[key] = a[:len(a)-1]
    78  	} else {
    79  		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
    80  	}
    81  	if clear {
    82  		o.append(ir.NewAssignStmt(base.Pos, v, nil))
    83  	}
    84  
    85  	o.temp = append(o.temp, v)
    86  	return v
    87  }
    88  
    89  // copyExpr behaves like newTemp but also emits
    90  // code to initialize the temporary to the value n.
    91  func (o *orderState) copyExpr(n ir.Node) *ir.Name {
    92  	return o.copyExpr1(n, false)
    93  }
    94  
    95  // copyExprClear is like copyExpr but clears the temp before assignment.
    96  // It is provided for use when the evaluation of tmp = n turns into
    97  // a function call that is passed a pointer to the temporary as the output space.
    98  // If the call blocks before tmp has been written,
    99  // the garbage collector will still treat the temporary as live,
   100  // so we must zero it before entering that call.
   101  // Today, this only happens for channel receive operations.
   102  // (The other candidate would be map access, but map access
   103  // returns a pointer to the result data instead of taking a pointer
   104  // to be filled in.)
   105  func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
   106  	return o.copyExpr1(n, true)
   107  }
   108  
   109  func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
   110  	t := n.Type()
   111  	v := o.newTemp(t, clear)
   112  	o.append(ir.NewAssignStmt(base.Pos, v, n))
   113  	return v
   114  }
   115  
   116  // cheapExpr returns a cheap version of n.
   117  // The definition of cheap is that n is a variable or constant.
   118  // If not, cheapExpr allocates a new tmp, emits tmp = n,
   119  // and then returns tmp.
   120  func (o *orderState) cheapExpr(n ir.Node) ir.Node {
   121  	if n == nil {
   122  		return nil
   123  	}
   124  
   125  	switch n.Op() {
   126  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   127  		return n
   128  	case ir.OLEN, ir.OCAP:
   129  		n := n.(*ir.UnaryExpr)
   130  		l := o.cheapExpr(n.X)
   131  		if l == n.X {
   132  			return n
   133  		}
   134  		a := ir.Copy(n).(*ir.UnaryExpr)
   135  		a.X = l
   136  		return typecheck.Expr(a)
   137  	}
   138  
   139  	return o.copyExpr(n)
   140  }
   141  
// safeExpr returns a safe version of n.
// The definition of safe is that n can appear multiple times
// without violating the semantics of the original program,
// and that assigning to the safe version has the same effect
// as assigning to the original n.
//
// The intended use is to apply to x when rewriting x += y into x = x + y.
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		// Names and constants are trivially safe to repeat.
		return n

	case ir.OLEN, ir.OCAP:
		// len/cap are safe if their operand is safe.
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		// x.f on a value: assigning to x.f must write through to the
		// original x, so x itself must be made safe (not merely cheap).
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		// p.f through a pointer: copying p is fine (cheap), since the
		// copy still aliases the same underlying struct.
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		// *p: same aliasing argument as ODOTPTR; cheap suffices.
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			// Indexing an array value: the array must stay assignable,
			// so recurse with safeExpr rather than copying it.
			l = o.safeExpr(n.X)
		} else {
			// Slices, strings, and maps are reference-like; a cheap copy
			// of the header/pointer preserves assignment semantics.
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil // not reached
	}
}
   216  
// addrTemp ensures that n is okay to pass by address to runtime routines.
// If the original argument n is not okay, addrTemp creates a tmp, emits
// tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g.
//
//	n.Left = o.addrTemp(n.Left)
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	// Note: Avoid addrTemp with static assignment for literal strings
	// when compiling FIPS packages.
	// The problem is that panic("foo") ends up creating a static RODATA temp
	// for the implicit conversion of "foo" to any, and we can't handle
	// the relocations in that temp.
	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
		// This is a basic literal or nil that we can store
		// directly in the read-only data section.
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			// A constant must be fully representable statically;
			// generated init code would indicate a bug.
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// Check now for a composite literal to possibly store in the read-only data section.
	v := staticValue(n)
	if v == nil {
		v = n
	}
	optEnabled := func(n ir.Node) bool {
		// Do this optimization only when enabled for this node
		// (hash-based gating, used for debugging/bisecting).
		return base.LiteralAllocHash.MatchPos(n.Pos(), nil)
	}
	if (v.Op() == ir.OSTRUCTLIT || v.Op() == ir.OARRAYLIT) && !base.Ctxt.IsFIPS() {
		if ir.IsZero(v) && 0 < v.Type().Size() && v.Type().Size() <= abi.ZeroValSize && optEnabled(n) {
			// This zero value can be represented by the read-only zeroVal.
			zeroVal := ir.NewLinksymExpr(v.Pos(), ir.Syms.ZeroVal, n.Type())
			vstat := typecheck.Expr(zeroVal).(*ir.LinksymOffsetExpr)
			return vstat
		}
		if isStaticCompositeLiteral(v) && optEnabled(n) {
			// v can be directly represented in the read-only data section.
			lit := v.(*ir.CompLitExpr)
			vstat := readonlystaticname(n.Type())
			fixedlit(inInitFunction, initKindStatic, lit, vstat, nil) // nil init
			vstat = typecheck.Expr(vstat).(*ir.Name)
			return vstat
		}
	}

	// Prevent taking the address of an SSA-able local variable (#63332).
	//
	// TODO(mdempsky): Note that OuterValue unwraps OCONVNOPs, but
	// IsAddressable does not. It should be possible to skip copying for
	// at least some of these OCONVNOPs (e.g., reinsert them after the
	// OADDR operation), but at least walkCompare needs to be fixed to
	// support that (see trybot failures on go.dev/cl/541715, PS1).
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				// Copy to a temp so the variable itself stays SSA-able.
				goto Copy
			}
		}

		return n
	}

Copy:
	return o.copyExpr(n)
}
   290  
// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// The first parameter is the position of n's containing node, for use in case
// that n's position is not unique (e.g., if n is an ONAME).
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: map*_fast* calls. See golang.org/issue/19015.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	// Fast variants take the key by value, but with a fixed runtime type.
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		// Key already has exactly the runtime type.
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named type to underlying type, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory.
		// We'll need to do a load with type kt. Create a temporary of type kt to
		// ensure sufficient alignment. nt may be under-aligned.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}
   348  
   349  // mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
   350  // in n to avoid string allocations for keys in map lookups.
   351  // Returns a bool that signals if a modification was made.
   352  //
   353  // For:
   354  //
   355  //	x = m[string(k)]
   356  //	x = m[T1{... Tn{..., string(k), ...}}]
   357  //
   358  // where k is []byte, T1 to Tn is a nesting of struct and array literals,
   359  // the allocation of backing bytes for the string can be avoided
   360  // by reusing the []byte backing array. These are special cases
   361  // for avoiding allocations when converting byte slices to strings.
   362  // It would be nice to handle these generally, but because
   363  // []byte keys are not allowed in maps, the use of string(k)
   364  // comes up in important cases in practice. See issue 3512.
   365  //
   366  // Note that this code does not handle the case:
   367  //
   368  //	s := string(k)
   369  //	x = m[s]
   370  //
   371  // Cases like this are handled during SSA, search for slicebytetostring
   372  // in ../ssa/_gen/generic.rules.
   373  func mapKeyReplaceStrConv(n ir.Node) bool {
   374  	var replaced bool
   375  	switch n.Op() {
   376  	case ir.OBYTES2STR:
   377  		n := n.(*ir.ConvExpr)
   378  		n.SetOp(ir.OBYTES2STRTMP)
   379  		replaced = true
   380  	case ir.OSTRUCTLIT:
   381  		n := n.(*ir.CompLitExpr)
   382  		for _, elem := range n.List {
   383  			elem := elem.(*ir.StructKeyExpr)
   384  			if mapKeyReplaceStrConv(elem.Value) {
   385  				replaced = true
   386  			}
   387  		}
   388  	case ir.OARRAYLIT:
   389  		n := n.(*ir.CompLitExpr)
   390  		for _, elem := range n.List {
   391  			if elem.Op() == ir.OKEY {
   392  				elem = elem.(*ir.KeyExpr).Value
   393  			}
   394  			if mapKeyReplaceStrConv(elem) {
   395  				replaced = true
   396  			}
   397  		}
   398  	}
   399  	return replaced
   400  }
   401  
   402  type ordermarker int
   403  
   404  // markTemp returns the top of the temporary variable stack.
   405  func (o *orderState) markTemp() ordermarker {
   406  	return ordermarker(len(o.temp))
   407  }
   408  
   409  // popTemp pops temporaries off the stack until reaching the mark,
   410  // which must have been returned by markTemp.
   411  func (o *orderState) popTemp(mark ordermarker) {
   412  	for _, n := range o.temp[mark:] {
   413  		key := n.Type().LinkString()
   414  		o.free[key] = append(o.free[key], n)
   415  	}
   416  	o.temp = o.temp[:mark]
   417  }
   418  
   419  // stmtList orders each of the statements in the list.
   420  func (o *orderState) stmtList(l ir.Nodes) {
   421  	s := l
   422  	for i := range s {
   423  		orderMakeSliceCopy(s[i:])
   424  		o.stmt(s[i])
   425  	}
   426  }
   427  
// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
	// Skip under -N (no optimization) or when instrumenting (race/msan),
	// where the fused form would hide the separate copy from the tools.
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The line above this one is correct with the differing equality operators:
		// we want as.X and cp.X to be the same name,
		// but we want the initial data to be coming from a different name.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	// Only fuse heap-escaping, length-only makes: make([]T, len) with no
	// explicit cap. (EscNone slices are stack-allocated and handled elsewhere.)
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove separate copy call
}
   465  
   466  // edge inserts coverage instrumentation for libfuzzer.
   467  func (o *orderState) edge() {
   468  	if base.Debug.Libfuzzer == 0 {
   469  		return
   470  	}
   471  
   472  	// Create a new uint8 counter to be allocated in section __sancov_cntrs
   473  	counter := staticinit.StaticName(types.Types[types.TUINT8])
   474  	counter.SetLibfuzzer8BitCounter(true)
   475  	// As well as setting SetLibfuzzer8BitCounter, we preemptively set the
   476  	// symbol type to SLIBFUZZER_8BIT_COUNTER so that the race detector
   477  	// instrumentation pass (which does not have access to the flags set by
   478  	// SetLibfuzzer8BitCounter) knows to ignore them. This information is
   479  	// lost by the time it reaches the compile step, so SetLibfuzzer8BitCounter
   480  	// is still necessary.
   481  	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER
   482  
   483  	// We guarantee that the counter never becomes zero again once it has been
   484  	// incremented once. This implementation follows the NeverZero optimization
   485  	// presented by the paper:
   486  	// "AFL++: Combining Incremental Steps of Fuzzing Research"
   487  	// The NeverZero policy avoids the overflow to 0 by setting the counter to one
   488  	// after it reaches 255 and so, if an edge is executed at least one time, the entry is
   489  	// never 0.
   490  	// Another policy presented in the paper is the Saturated Counters policy which
   491  	// freezes the counter when it reaches the value of 255. However, a range
   492  	// of experiments showed that doing so decreases overall performance.
   493  	o.append(ir.NewIfStmt(base.Pos,
   494  		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
   495  		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
   496  		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
   497  }
   498  
   499  // orderBlock orders the block of statements in n into a new slice,
   500  // and then replaces the old slice in n with the new slice.
   501  // free is a map that can be used to obtain temporary variables by type.
   502  func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
   503  	if len(*n) != 0 {
   504  		// Set reasonable position for instrumenting code. See issue 53688.
   505  		// It would be nice if ir.Nodes had a position (the opening {, probably),
   506  		// but it doesn't. So we use the first statement's position instead.
   507  		ir.SetPos((*n)[0])
   508  	}
   509  	var order orderState
   510  	order.free = free
   511  	mark := order.markTemp()
   512  	order.edge()
   513  	order.stmtList(*n)
   514  	order.popTemp(mark)
   515  	*n = order.out
   516  }
   517  
   518  // exprInPlace orders the side effects in *np and
   519  // leaves them as the init list of the final *np.
   520  // The result of exprInPlace MUST be assigned back to n, e.g.
   521  //
   522  //	n.Left = o.exprInPlace(n.Left)
   523  func (o *orderState) exprInPlace(n ir.Node) ir.Node {
   524  	var order orderState
   525  	order.free = o.free
   526  	n = order.expr(n, nil)
   527  	n = ir.InitExpr(order.out, n)
   528  
   529  	// insert new temporaries from order
   530  	// at head of outer list.
   531  	o.temp = append(o.temp, order.temp...)
   532  	return n
   533  }
   534  
   535  // orderStmtInPlace orders the side effects of the single statement *np
   536  // and replaces it with the resulting statement list.
   537  // The result of orderStmtInPlace MUST be assigned back to n, e.g.
   538  //
   539  //	n.Left = orderStmtInPlace(n.Left)
   540  //
   541  // free is a map that can be used to obtain temporary variables by type.
   542  func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
   543  	var order orderState
   544  	order.free = free
   545  	mark := order.markTemp()
   546  	order.stmt(n)
   547  	order.popTemp(mark)
   548  	return ir.NewBlockStmt(src.NoXPos, order.out)
   549  }
   550  
   551  // init moves n's init list to o.out.
   552  func (o *orderState) init(n ir.Node) {
   553  	if ir.MayBeShared(n) {
   554  		// For concurrency safety, don't mutate potentially shared nodes.
   555  		// First, ensure that no work is required here.
   556  		if len(n.Init()) > 0 {
   557  			base.Fatalf("order.init shared node with ninit")
   558  		}
   559  		return
   560  	}
   561  	o.stmtList(ir.TakeInit(n))
   562  }
   563  
// call orders the call expression n.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		// Order the builtin's operands; the concrete node type
		// determines which fields hold them.
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
		// do not introduce temporaries here, so it is easier to rewrite it
		// to symbol address reference later in walk.
		return
	}

	// Ordinary function or interface call: order the callee and arguments.
	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}
   609  
   610  // mapAssign appends n to o.out.
   611  func (o *orderState) mapAssign(n ir.Node) {
   612  	switch n.Op() {
   613  	default:
   614  		base.Fatalf("order.mapAssign %v", n.Op())
   615  
   616  	case ir.OAS:
   617  		n := n.(*ir.AssignStmt)
   618  		if n.X.Op() == ir.OINDEXMAP {
   619  			n.Y = o.safeMapRHS(n.Y)
   620  		}
   621  		o.out = append(o.out, n)
   622  	case ir.OASOP:
   623  		n := n.(*ir.AssignOpStmt)
   624  		if n.X.Op() == ir.OINDEXMAP {
   625  			n.Y = o.safeMapRHS(n.Y)
   626  		}
   627  		o.out = append(o.out, n)
   628  	}
   629  }
   630  
   631  func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
   632  	// Make sure we evaluate the RHS before starting the map insert.
   633  	// We need to make sure the RHS won't panic.  See issue 22881.
   634  	if r.Op() == ir.OAPPEND {
   635  		r := r.(*ir.CallExpr)
   636  		s := r.Args[1:]
   637  		for i, n := range s {
   638  			s[i] = o.cheapExpr(n)
   639  		}
   640  		return r
   641  	}
   642  	return o.cheapExpr(r)
   643  }
   644  
   645  // stmt orders the statement n, appending to o.out.
   646  func (o *orderState) stmt(n ir.Node) {
   647  	if n == nil {
   648  		return
   649  	}
   650  
   651  	lno := ir.SetPos(n)
   652  	o.init(n)
   653  
   654  	switch n.Op() {
   655  	default:
   656  		base.Fatalf("order.stmt %v", n.Op())
   657  
   658  	case ir.OINLMARK:
   659  		o.out = append(o.out, n)
   660  
   661  	case ir.OAS:
   662  		n := n.(*ir.AssignStmt)
   663  		t := o.markTemp()
   664  
   665  		// There's a delicate interaction here between two OINDEXMAP
   666  		// optimizations.
   667  		//
   668  		// First, we want to handle m[k] = append(m[k], ...) with a single
   669  		// runtime call to mapassign. This requires the m[k] expressions to
   670  		// satisfy ir.SameSafeExpr in walkAssign.
   671  		//
   672  		// But if k is a slow map key type that's passed by reference (e.g.,
   673  		// byte), then we want to avoid marking user variables as addrtaken,
   674  		// if that might prevent the compiler from keeping k in a register.
   675  		//
   676  		// TODO(mdempsky): It would be better if walk was responsible for
   677  		// inserting temporaries as needed.
   678  		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
   679  			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])
   680  
   681  		n.X = o.expr(n.X, nil)
   682  		if mapAppend {
   683  			indexLHS := n.X.(*ir.IndexExpr)
   684  			indexLHS.X = o.cheapExpr(indexLHS.X)
   685  			indexLHS.Index = o.cheapExpr(indexLHS.Index)
   686  
   687  			call := n.Y.(*ir.CallExpr)
   688  			arg0 := call.Args[0]
   689  			// ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096).
   690  			for arg0.Op() == ir.OCONVNOP {
   691  				arg0 = arg0.(*ir.ConvExpr).X
   692  			}
   693  			indexRHS := arg0.(*ir.IndexExpr)
   694  			indexRHS.X = indexLHS.X
   695  			indexRHS.Index = indexLHS.Index
   696  
   697  			o.exprList(call.Args[1:])
   698  		} else {
   699  			n.Y = o.expr(n.Y, n.X)
   700  		}
   701  		o.mapAssign(n)
   702  		o.popTemp(t)
   703  
   704  	case ir.OASOP:
   705  		n := n.(*ir.AssignOpStmt)
   706  		t := o.markTemp()
   707  		n.X = o.expr(n.X, nil)
   708  		n.Y = o.expr(n.Y, nil)
   709  
   710  		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
   711  			// Rewrite m[k] op= r into m[k] = m[k] op r so
   712  			// that we can ensure that if op panics
   713  			// because r is zero, the panic happens before
   714  			// the map assignment.
   715  			// DeepCopy is a big hammer here, but safeExpr
   716  			// makes sure there is nothing too deep being copied.
   717  			l1 := o.safeExpr(n.X)
   718  			l2 := ir.DeepCopy(src.NoXPos, l1)
   719  			if l2.Op() == ir.OINDEXMAP {
   720  				l2 := l2.(*ir.IndexExpr)
   721  				l2.Assigned = false
   722  			}
   723  			l2 = o.copyExpr(l2)
   724  			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
   725  			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
   726  			o.mapAssign(as)
   727  			o.popTemp(t)
   728  			return
   729  		}
   730  
   731  		o.mapAssign(n)
   732  		o.popTemp(t)
   733  
   734  	case ir.OAS2:
   735  		n := n.(*ir.AssignListStmt)
   736  		t := o.markTemp()
   737  		o.exprList(n.Lhs)
   738  		o.exprList(n.Rhs)
   739  		o.out = append(o.out, n)
   740  		o.popTemp(t)
   741  
   742  	// Special: avoid copy of func call n.Right
   743  	case ir.OAS2FUNC:
   744  		n := n.(*ir.AssignListStmt)
   745  		t := o.markTemp()
   746  		o.exprList(n.Lhs)
   747  		call := n.Rhs[0]
   748  		o.init(call)
   749  		if ic, ok := call.(*ir.InlinedCallExpr); ok {
   750  			o.stmtList(ic.Body)
   751  
   752  			n.SetOp(ir.OAS2)
   753  			n.Rhs = ic.ReturnVars
   754  
   755  			o.exprList(n.Rhs)
   756  			o.out = append(o.out, n)
   757  		} else {
   758  			o.call(call)
   759  			o.as2func(n)
   760  		}
   761  		o.popTemp(t)
   762  
   763  	// Special: use temporary variables to hold result,
   764  	// so that runtime can take address of temporary.
   765  	// No temporary for blank assignment.
   766  	//
   767  	// OAS2MAPR: make sure key is addressable if needed,
   768  	//           and make sure OINDEXMAP is not copied out.
   769  	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
   770  		n := n.(*ir.AssignListStmt)
   771  		t := o.markTemp()
   772  		o.exprList(n.Lhs)
   773  
   774  		switch r := n.Rhs[0]; r.Op() {
   775  		case ir.ODOTTYPE2:
   776  			r := r.(*ir.TypeAssertExpr)
   777  			r.X = o.expr(r.X, nil)
   778  		case ir.ODYNAMICDOTTYPE2:
   779  			r := r.(*ir.DynamicTypeAssertExpr)
   780  			r.X = o.expr(r.X, nil)
   781  			r.RType = o.expr(r.RType, nil)
   782  			r.ITab = o.expr(r.ITab, nil)
   783  		case ir.ORECV:
   784  			r := r.(*ir.UnaryExpr)
   785  			r.X = o.expr(r.X, nil)
   786  		case ir.OINDEXMAP:
   787  			r := r.(*ir.IndexExpr)
   788  			r.X = o.expr(r.X, nil)
   789  			r.Index = o.expr(r.Index, nil)
   790  			// See similar conversion for OINDEXMAP below.
   791  			_ = mapKeyReplaceStrConv(r.Index)
   792  			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
   793  		default:
   794  			base.Fatalf("order.stmt: %v", r.Op())
   795  		}
   796  
   797  		o.as2ok(n)
   798  		o.popTemp(t)
   799  
   800  	// Special: does not save n onto out.
   801  	case ir.OBLOCK:
   802  		n := n.(*ir.BlockStmt)
   803  		o.stmtList(n.List)
   804  
   805  	// Special: n->left is not an expression; save as is.
   806  	case ir.OBREAK,
   807  		ir.OCONTINUE,
   808  		ir.ODCL,
   809  		ir.OFALL,
   810  		ir.OGOTO,
   811  		ir.OLABEL,
   812  		ir.OTAILCALL:
   813  		o.out = append(o.out, n)
   814  
   815  	// Special: handle call arguments.
   816  	case ir.OCALLFUNC, ir.OCALLINTER:
   817  		n := n.(*ir.CallExpr)
   818  		t := o.markTemp()
   819  		o.call(n)
   820  		o.out = append(o.out, n)
   821  		o.popTemp(t)
   822  
   823  	case ir.OINLCALL:
   824  		n := n.(*ir.InlinedCallExpr)
   825  		o.stmtList(n.Body)
   826  
   827  		// discard results; double-check for no side effects
   828  		for _, result := range n.ReturnVars {
   829  			if staticinit.AnySideEffects(result) {
   830  				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
   831  			}
   832  		}
   833  
   834  	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
   835  		n := n.(*ir.UnaryExpr)
   836  		t := o.markTemp()
   837  		n.X = o.expr(n.X, nil)
   838  		o.out = append(o.out, n)
   839  		o.popTemp(t)
   840  
   841  	case ir.OCOPY:
   842  		n := n.(*ir.BinaryExpr)
   843  		t := o.markTemp()
   844  		n.X = o.expr(n.X, nil)
   845  		n.Y = o.expr(n.Y, nil)
   846  		o.out = append(o.out, n)
   847  		o.popTemp(t)
   848  
   849  	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
   850  		n := n.(*ir.CallExpr)
   851  		t := o.markTemp()
   852  		o.call(n)
   853  		o.out = append(o.out, n)
   854  		o.popTemp(t)
   855  
   856  	// Special: order arguments to inner call but not call itself.
   857  	case ir.ODEFER, ir.OGO:
   858  		n := n.(*ir.GoDeferStmt)
   859  		t := o.markTemp()
   860  		o.init(n.Call)
   861  		o.call(n.Call)
   862  		o.out = append(o.out, n)
   863  		o.popTemp(t)
   864  
   865  	case ir.ODELETE:
   866  		n := n.(*ir.CallExpr)
   867  		t := o.markTemp()
   868  		n.Args[0] = o.expr(n.Args[0], nil)
   869  		n.Args[1] = o.expr(n.Args[1], nil)
   870  		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
   871  		o.out = append(o.out, n)
   872  		o.popTemp(t)
   873  
   874  	// Clean temporaries from condition evaluation at
   875  	// beginning of loop body and after for statement.
   876  	case ir.OFOR:
   877  		n := n.(*ir.ForStmt)
   878  		t := o.markTemp()
   879  		n.Cond = o.exprInPlace(n.Cond)
   880  		orderBlock(&n.Body, o.free)
   881  		n.Post = orderStmtInPlace(n.Post, o.free)
   882  		o.out = append(o.out, n)
   883  		o.popTemp(t)
   884  
   885  	// Clean temporaries from condition at
   886  	// beginning of both branches.
   887  	case ir.OIF:
   888  		n := n.(*ir.IfStmt)
   889  		t := o.markTemp()
   890  		n.Cond = o.exprInPlace(n.Cond)
   891  		o.popTemp(t)
   892  		orderBlock(&n.Body, o.free)
   893  		orderBlock(&n.Else, o.free)
   894  		o.out = append(o.out, n)
   895  
   896  	case ir.ORANGE:
   897  		// n.Right is the expression being ranged over.
   898  		// order it, and then make a copy if we need one.
   899  		// We almost always do, to ensure that we don't
   900  		// see any value changes made during the loop.
   901  		// Usually the copy is cheap (e.g., array pointer,
   902  		// chan, slice, string are all tiny).
   903  		// The exception is ranging over an array value
   904  		// (not a slice, not a pointer to array),
   905  		// which must make a copy to avoid seeing updates made during
   906  		// the range body. Ranging over an array value is uncommon though.
   907  
   908  		// Mark []byte(str) range expression to reuse string backing storage.
   909  		// It is safe because the storage cannot be mutated.
   910  		n := n.(*ir.RangeStmt)
   911  		if x, ok := n.X.(*ir.ConvExpr); ok {
   912  			switch x.Op() {
   913  			case ir.OSTR2BYTES:
   914  				x.SetOp(ir.OSTR2BYTESTMP)
   915  				fallthrough
   916  			case ir.OSTR2BYTESTMP:
   917  				x.MarkNonNil() // "range []byte(nil)" is fine
   918  			}
   919  		}
   920  
   921  		t := o.markTemp()
   922  		n.X = o.expr(n.X, nil)
   923  
   924  		orderBody := true
   925  		xt := typecheck.RangeExprType(n.X.Type())
   926  		switch k := xt.Kind(); {
   927  		default:
   928  			base.Fatalf("order.stmt range %v", n.Type())
   929  
   930  		case types.IsInt[k]:
   931  			// Used only once, no need to copy.
   932  
   933  		case k == types.TARRAY, k == types.TSLICE:
   934  			if n.Value == nil || ir.IsBlank(n.Value) {
   935  				// for i := range x will only use x once, to compute len(x).
   936  				// No need to copy it.
   937  				break
   938  			}
   939  			fallthrough
   940  
   941  		case k == types.TCHAN, k == types.TSTRING:
   942  			// chan, string, slice, array ranges use value multiple times.
   943  			// make copy.
   944  			r := n.X
   945  
   946  			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
   947  				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
   948  				r.SetType(types.Types[types.TSTRING])
   949  				r = typecheck.Expr(r)
   950  			}
   951  
   952  			n.X = o.copyExpr(r)
   953  
   954  		case k == types.TMAP:
   955  			if isMapClear(n) {
   956  				// Preserve the body of the map clear pattern so it can
   957  				// be detected during walk. The loop body will not be used
   958  				// when optimizing away the range loop to a runtime call.
   959  				orderBody = false
   960  				break
   961  			}
   962  
   963  			// copy the map value in case it is a map literal.
   964  			// TODO(rsc): Make tmp = literal expressions reuse tmp.
   965  			// For maps tmp is just one word so it hardly matters.
   966  			r := n.X
   967  			n.X = o.copyExpr(r)
   968  
   969  			// n.Prealloc is the temp for the iterator.
   970  			// MapIterType contains pointers and needs to be zeroed.
   971  			if buildcfg.Experiment.SwissMap {
   972  				n.Prealloc = o.newTemp(reflectdata.SwissMapIterType(), true)
   973  			} else {
   974  				n.Prealloc = o.newTemp(reflectdata.OldMapIterType(), true)
   975  			}
   976  		}
   977  		n.Key = o.exprInPlace(n.Key)
   978  		n.Value = o.exprInPlace(n.Value)
   979  		if orderBody {
   980  			orderBlock(&n.Body, o.free)
   981  		}
   982  		o.out = append(o.out, n)
   983  		o.popTemp(t)
   984  
   985  	case ir.ORETURN:
   986  		n := n.(*ir.ReturnStmt)
   987  		o.exprList(n.Results)
   988  		o.out = append(o.out, n)
   989  
   990  	// Special: clean case temporaries in each block entry.
   991  	// Select must enter one of its blocks, so there is no
   992  	// need for a cleaning at the end.
   993  	// Doubly special: evaluation order for select is stricter
   994  	// than ordinary expressions. Even something like p.c
   995  	// has to be hoisted into a temporary, so that it cannot be
   996  	// reordered after the channel evaluation for a different
   997  	// case (if p were nil, then the timing of the fault would
   998  	// give this away).
   999  	case ir.OSELECT:
  1000  		n := n.(*ir.SelectStmt)
  1001  		t := o.markTemp()
  1002  		for _, ncas := range n.Cases {
  1003  			r := ncas.Comm
  1004  			ir.SetPos(ncas)
  1005  
  1006  			// Append any new body prologue to ninit.
  1007  			// The next loop will insert ninit into nbody.
  1008  			if len(ncas.Init()) != 0 {
  1009  				base.Fatalf("order select ninit")
  1010  			}
  1011  			if r == nil {
  1012  				continue
  1013  			}
  1014  			switch r.Op() {
  1015  			default:
  1016  				ir.Dump("select case", r)
  1017  				base.Fatalf("unknown op in select %v", r.Op())
  1018  
  1019  			case ir.OSELRECV2:
  1020  				// case x, ok = <-c
  1021  				r := r.(*ir.AssignListStmt)
  1022  				recv := r.Rhs[0].(*ir.UnaryExpr)
  1023  				recv.X = o.expr(recv.X, nil)
  1024  				if !ir.IsAutoTmp(recv.X) {
  1025  					recv.X = o.copyExpr(recv.X)
  1026  				}
  1027  				init := ir.TakeInit(r)
  1028  
  1029  				colas := r.Def
  1030  				do := func(i int, t *types.Type) {
  1031  					n := r.Lhs[i]
  1032  					if ir.IsBlank(n) {
  1033  						return
  1034  					}
  1035  					// If this is case x := <-ch or case x, y := <-ch, the case has
  1036  					// the ODCL nodes to declare x and y. We want to delay that
  1037  					// declaration (and possible allocation) until inside the case body.
  1038  					// Delete the ODCL nodes here and recreate them inside the body below.
  1039  					if colas {
  1040  						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
  1041  							init = init[1:]
  1042  
  1043  							// iimport may have added a default initialization assignment,
  1044  							// due to how it handles ODCL statements.
  1045  							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
  1046  								init = init[1:]
  1047  							}
  1048  						}
  1049  						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1050  						ncas.PtrInit().Append(dcl)
  1051  					}
  1052  					tmp := o.newTemp(t, t.HasPointers())
  1053  					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
  1054  					ncas.PtrInit().Append(as)
  1055  					r.Lhs[i] = tmp
  1056  				}
  1057  				do(0, recv.X.Type().Elem())
  1058  				do(1, types.Types[types.TBOOL])
  1059  				if len(init) != 0 {
  1060  					ir.DumpList("ninit", init)
  1061  					base.Fatalf("ninit on select recv")
  1062  				}
  1063  				orderBlock(ncas.PtrInit(), o.free)
  1064  
  1065  			case ir.OSEND:
  1066  				r := r.(*ir.SendStmt)
  1067  				if len(r.Init()) != 0 {
  1068  					ir.DumpList("ninit", r.Init())
  1069  					base.Fatalf("ninit on select send")
  1070  				}
  1071  
  1072  				// case c <- x
  1073  				// r->left is c, r->right is x, both are always evaluated.
  1074  				r.Chan = o.expr(r.Chan, nil)
  1075  
  1076  				if !ir.IsAutoTmp(r.Chan) {
  1077  					r.Chan = o.copyExpr(r.Chan)
  1078  				}
  1079  				r.Value = o.expr(r.Value, nil)
  1080  				if !ir.IsAutoTmp(r.Value) {
  1081  					r.Value = o.copyExpr(r.Value)
  1082  				}
  1083  			}
  1084  		}
  1085  		// Now that we have accumulated all the temporaries, clean them.
  1086  		// Also insert any ninit queued during the previous loop.
  1087  		// (The temporary cleaning must follow that ninit work.)
  1088  		for _, cas := range n.Cases {
  1089  			orderBlock(&cas.Body, o.free)
  1090  
  1091  			// TODO(mdempsky): Is this actually necessary?
  1092  			// walkSelect appears to walk Ninit.
  1093  			cas.Body.Prepend(ir.TakeInit(cas)...)
  1094  		}
  1095  
  1096  		o.out = append(o.out, n)
  1097  		o.popTemp(t)
  1098  
  1099  	// Special: value being sent is passed as a pointer; make it addressable.
  1100  	case ir.OSEND:
  1101  		n := n.(*ir.SendStmt)
  1102  		t := o.markTemp()
  1103  		n.Chan = o.expr(n.Chan, nil)
  1104  		n.Value = o.expr(n.Value, nil)
  1105  		if base.Flag.Cfg.Instrumenting {
  1106  			// Force copying to the stack so that (chan T)(nil) <- x
  1107  			// is still instrumented as a read of x.
  1108  			n.Value = o.copyExpr(n.Value)
  1109  		} else {
  1110  			n.Value = o.addrTemp(n.Value)
  1111  		}
  1112  		o.out = append(o.out, n)
  1113  		o.popTemp(t)
  1114  
  1115  	// TODO(rsc): Clean temporaries more aggressively.
  1116  	// Note that because walkSwitch will rewrite some of the
  1117  	// switch into a binary search, this is not as easy as it looks.
  1118  	// (If we ran that code here we could invoke order.stmt on
  1119  	// the if-else chain instead.)
  1120  	// For now just clean all the temporaries at the end.
  1121  	// In practice that's fine.
  1122  	case ir.OSWITCH:
  1123  		n := n.(*ir.SwitchStmt)
  1124  		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
  1125  			// Add empty "default:" case for instrumentation.
  1126  			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
  1127  		}
  1128  
  1129  		t := o.markTemp()
  1130  		n.Tag = o.expr(n.Tag, nil)
  1131  		for _, ncas := range n.Cases {
  1132  			o.exprListInPlace(ncas.List)
  1133  			orderBlock(&ncas.Body, o.free)
  1134  		}
  1135  
  1136  		o.out = append(o.out, n)
  1137  		o.popTemp(t)
  1138  	}
  1139  
  1140  	base.Pos = lno
  1141  }
  1142  
  1143  func hasDefaultCase(n *ir.SwitchStmt) bool {
  1144  	for _, ncas := range n.Cases {
  1145  		if len(ncas.List) == 0 {
  1146  			return true
  1147  		}
  1148  	}
  1149  	return false
  1150  }
  1151  
  1152  // exprList orders the expression list l into o.
  1153  func (o *orderState) exprList(l ir.Nodes) {
  1154  	s := l
  1155  	for i := range s {
  1156  		s[i] = o.expr(s[i], nil)
  1157  	}
  1158  }
  1159  
  1160  // exprListInPlace orders the expression list l but saves
  1161  // the side effects on the individual expression ninit lists.
  1162  func (o *orderState) exprListInPlace(l ir.Nodes) {
  1163  	s := l
  1164  	for i := range s {
  1165  		s[i] = o.exprInPlace(s[i])
  1166  	}
  1167  }
  1168  
// exprNoLHS orders n with no assignment target. It exists so that a
// single closure over it can be cached in o.edit for use with
// ir.EditChildren (see the default case of expr1).
func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}
  1172  
  1173  // expr orders a single expression, appending side
  1174  // effects to o.out as needed.
  1175  // If this is part of an assignment lhs = *np, lhs is given.
  1176  // Otherwise lhs == nil. (When lhs != nil it may be possible
  1177  // to avoid copying the result of the expression to a temporary.)
  1178  // The result of expr MUST be assigned back to n, e.g.
  1179  //
  1180  //	n.Left = o.expr(n.Left, lhs)
  1181  func (o *orderState) expr(n, lhs ir.Node) ir.Node {
  1182  	if n == nil {
  1183  		return n
  1184  	}
  1185  	lno := ir.SetPos(n)
  1186  	n = o.expr1(n, lhs)
  1187  	base.Pos = lno
  1188  	return n
  1189  }
  1190  
// expr1 is the per-opcode implementation of expr: it orders the single
// expression n (with optional assignment target lhs) and returns the
// replacement expression, which the caller MUST store back in place of n.
// Side-effect statements are appended to o.out; temporaries are created
// via o.newTemp/o.copyExpr as required by the runtime calling conventions.
func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Fewer than 5 strings use direct runtime helpers.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			// Long concatenations pass a preallocated array of strings
			// to the runtime.
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse byteSlice backing
		// buffer during conversion. String concatenation does not
		// memorize the strings for later use, so it is safe.
		// However, we can do it only if there is at least one non-empty string literal.
		// Otherwise if all other arguments are empty strings,
		// concatstrings will return the reference to the temp string
		// to the caller.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// key may need to be addressable
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// We also process static composite literal node here, making a named static global
			// whose address we can put directly in an interface (see OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together. See golang.org/issue/15329.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r

		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		// Copy the result into a temp unless it is being assigned
		// directly to a plain variable (and we are not instrumenting).
		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...) .
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil) // order x
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil) // order y
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		n := n.(*ir.ClosureExpr)
		// Transient closures with captured variables get a
		// stack-preallocated closure struct.
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// buffer during conversion. String comparison does not
			// memorize the strings for later use, so it is safe.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// for complex comparisons, we need both args to be
			// addressable so we can pass them to the runtime.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//   map[int]int{
		//     a(): b(),
		//     c(): d(),
		//     e(): f(),
		//   }
		// to
		//   m := map[int]int{}
		//   m[a()] = b()
		//   m[c()] = d()
		//   m[e()] = f()
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		// See issue 26552.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them to dynamic;
			// e.g., OCONVIFACE nodes. See #31777.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include that count
		// in the map alloc hint.
		// We're assuming here that all the keys in the map literal are distinct.
		// If any are equal, this will be an overcount. Probably not worth accounting
		// for that, as equal keys in map literals are rare, and at worst we waste
		// a bit of space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}
  1546  
  1547  // as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
  1548  // The caller should order the right-hand side of the assignment before calling order.as2func.
  1549  // It rewrites,
  1550  //
  1551  //	a, b, a = ...
  1552  //
  1553  // as
  1554  //
  1555  //	tmp1, tmp2, tmp3 = ...
  1556  //	a, b, a = tmp1, tmp2, tmp3
  1557  //
  1558  // This is necessary to ensure left to right assignment order.
  1559  func (o *orderState) as2func(n *ir.AssignListStmt) {
  1560  	results := n.Rhs[0].Type()
  1561  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1562  	for i, nl := range n.Lhs {
  1563  		if !ir.IsBlank(nl) {
  1564  			typ := results.Field(i).Type
  1565  			tmp := o.newTemp(typ, typ.HasPointers())
  1566  			n.Lhs[i] = tmp
  1567  			as.Lhs = append(as.Lhs, nl)
  1568  			as.Rhs = append(as.Rhs, tmp)
  1569  		}
  1570  	}
  1571  
  1572  	o.out = append(o.out, n)
  1573  	o.stmt(typecheck.Stmt(as))
  1574  }
  1575  
  1576  // as2ok orders OAS2XXX with ok.
  1577  // Just like as2func, this also adds temporaries to ensure left-to-right assignment.
  1578  func (o *orderState) as2ok(n *ir.AssignListStmt) {
  1579  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1580  
  1581  	do := func(i int, typ *types.Type) {
  1582  		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
  1583  			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
  1584  			n.Lhs[i] = tmp
  1585  			as.Lhs = append(as.Lhs, nl)
  1586  			if i == 1 {
  1587  				// The "ok" result is an untyped boolean according to the Go
  1588  				// spec. We need to explicitly convert it to the LHS type in
  1589  				// case the latter is a defined boolean type (#8475).
  1590  				tmp = typecheck.Conv(tmp, nl.Type())
  1591  			}
  1592  			as.Rhs = append(as.Rhs, tmp)
  1593  		}
  1594  	}
  1595  
  1596  	do(0, n.Rhs[0].Type())
  1597  	do(1, types.Types[types.TBOOL])
  1598  
  1599  	o.out = append(o.out, n)
  1600  	o.stmt(typecheck.Stmt(as))
  1601  }
  1602  

View as plain text