Source file src/cmd/compile/internal/staticinit/sched.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package staticinit
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"go/token"
    11  	"os"
    12  	"slices"
    13  	"strings"
    14  
    15  	"cmd/compile/internal/base"
    16  	"cmd/compile/internal/ir"
    17  	"cmd/compile/internal/reflectdata"
    18  	"cmd/compile/internal/staticdata"
    19  	"cmd/compile/internal/typecheck"
    20  	"cmd/compile/internal/types"
    21  	"cmd/internal/obj"
    22  	"cmd/internal/objabi"
    23  	"cmd/internal/src"
    24  )
    25  
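        // An Entry records the initialization of one element or field of a
        // composite value: the expression to store and where to store it.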
    26  type Entry struct {
    27  	Xoffset int64   // struct, array only
    28  	Expr    ir.Node // initialization expression to store at Xoffset; may require run-time computation
    29  }
    30  
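        // A Plan lists the non-zero entries needed to initialize a composite
        // value; it is built by (*Schedule).initplan.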
    31  type Plan struct {
    32  	E []Entry
    33  }
    34  
    35  // A Schedule is used to decompose assignment statements into
    36  // static and dynamic initialization parts. Static initializations are
    37  // handled by populating variables' linker symbol data, while dynamic
    38  // initializations are accumulated to be executed in order.
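        //
        // For example (illustrative only; f stands for any call that cannot be
        // evaluated at compile time):
        //
        //	var a = 1     // static: the constant is written into a's data symbol
        //	var b = f()   // dynamic: an assignment statement is appended to Out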
    39  type Schedule struct {
    40  	// Out is the ordered list of dynamic initialization
    41  	// statements.
    42  	Out []ir.Node
    43  
    44  	Plans map[ir.Node]*Plan
    45  	Temps map[ir.Node]*ir.Name
    46  
    47  	// seenMutation tracks whether we've seen an initialization
    48  	// expression that may have modified other package-scope variables
    49  	// within this package.
    50  	seenMutation bool
    51  }
    52  
    53  func (s *Schedule) append(n ir.Node) {
    54  	s.Out = append(s.Out, n)
    55  }
    56  
    57  // StaticInit adds an initialization statement n to the schedule.
    58  func (s *Schedule) StaticInit(n ir.Node) {
    59  	if !s.tryStaticInit(n) {
    60  		if base.Flag.Percent != 0 {
    61  			ir.Dump("StaticInit failed", n)
    62  		}
    63  		s.append(n)
    64  	}
    65  }
    66  
    67  // varToMapInit holds book-keeping state for global map initialization;
    68  // it records the init function created by the compiler to host the
    69  // initialization code for the map in question.
    70  var varToMapInit map[*ir.Name]*ir.Func
    71  
    72  // MapInitToVar is the inverse of varToMapInit; it maintains a mapping
    73  // from a compiler-generated init function to the map the function is
    74  // initializing.
    75  var MapInitToVar map[*ir.Func]*ir.Name
    76  
    77  // recordFuncForVar establishes a mapping between global map var "v" and
    78  // outlined init function "fn" (and vice versa), so that we can use
    79  // the mappings later on to update relocations.
    80  func recordFuncForVar(v *ir.Name, fn *ir.Func) {
    81  	if varToMapInit == nil {
    82  		varToMapInit = make(map[*ir.Name]*ir.Func)
    83  		MapInitToVar = make(map[*ir.Func]*ir.Name)
    84  	}
    85  	varToMapInit[v] = fn
    86  	MapInitToVar[fn] = v
    87  }
    88  
    89  // allBlank reports whether every node in exprs is blank.
    90  func allBlank(exprs []ir.Node) bool {
    91  	for _, expr := range exprs {
    92  		if !ir.IsBlank(expr) {
    93  			return false
    94  		}
    95  	}
    96  	return true
    97  }
    98  
    99  // tryStaticInit attempts to statically execute an initialization
   100  // statement and reports whether it succeeded.
   101  func (s *Schedule) tryStaticInit(n ir.Node) bool {
   102  	var lhs []ir.Node
   103  	var rhs ir.Node
   104  
   105  	switch n.Op() {
   106  	default:
   107  		base.FatalfAt(n.Pos(), "unexpected initialization statement: %v", n)
   108  	case ir.OAS:
   109  		n := n.(*ir.AssignStmt)
   110  		lhs, rhs = []ir.Node{n.X}, n.Y
   111  	case ir.OAS2:
   112  		// Usually OAS2 has been rewritten to separate OASes by types2.
   113  // What's left here is "var a, b = tmp1, tmp2" as a result of rewriting
   114  		// "var a, b = f()" that needs type conversion, which is not static.
   115  		n := n.(*ir.AssignListStmt)
   116  		for _, rhs := range n.Rhs {
   117  			for rhs.Op() == ir.OCONVNOP {
   118  				rhs = rhs.(*ir.ConvExpr).X
   119  			}
   120  			if name, ok := rhs.(*ir.Name); !ok || !name.AutoTemp() {
   121  				base.FatalfAt(n.Pos(), "unexpected rhs, not an autotmp: %+v", rhs)
   122  			}
   123  		}
   124  		return false
   125  	case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
   126  		n := n.(*ir.AssignListStmt)
   127  		if len(n.Lhs) < 2 || len(n.Rhs) != 1 {
   128  			base.FatalfAt(n.Pos(), "unexpected shape for %v: %v", n.Op(), n)
   129  		}
   130  		lhs, rhs = n.Lhs, n.Rhs[0]
   131  	case ir.OCALLFUNC:
   132  		return false // outlined map init call; no mutations
   133  	}
   134  
   135  	if !s.seenMutation {
   136  		s.seenMutation = mayModifyPkgVar(rhs)
   137  	}
   138  
   139  	if allBlank(lhs) && !AnySideEffects(rhs) {
   140  		return true // discard
   141  	}
   142  
   143  	// Only worry about simple "l = r" assignments. The OAS2*
   144  	// assignments mostly necessitate dynamic execution anyway.
   145  	if len(lhs) > 1 {
   146  		return false
   147  	}
   148  
   149  	lno := ir.SetPos(n)
   150  	defer func() { base.Pos = lno }()
   151  
   152  	nam := lhs[0].(*ir.Name)
   153  	return s.StaticAssign(nam, 0, rhs, nam.Type())
   154  }
   155  
   156  // staticcopy is like StaticAssign, but instead of evaluating an expression it
   157  // copies the value of another, already-initialized variable rn into l at offset loff.
   158  func (s *Schedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
   159  	if rn.Class == ir.PFUNC {
   160  		// TODO if roff != 0 { panic }
   161  		staticdata.InitAddr(l, loff, staticdata.FuncLinksym(rn))
   162  		return true
   163  	}
   164  	if rn.Class != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
   165  		return false
   166  	}
   167  	if rn.Defn == nil {
   168  		// No explicit initialization value. Probably zeroed but perhaps
   169  		// supplied externally and of unknown value.
   170  		return false
   171  	}
   172  	if rn.Defn.Op() != ir.OAS {
   173  		return false
   174  	}
   175  	if rn.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675)
   176  		return false
   177  	}
   178  	if rn.Embed != nil {
   179  		return false
   180  	}
   181  	orig := rn
   182  	r := rn.Defn.(*ir.AssignStmt).Y
   183  	if r == nil {
   184  		// types2.InitOrder doesn't include default initializers.
   185  		base.Fatalf("unexpected initializer: %v", rn.Defn)
   186  	}
   187  
   188  	// Variable may have been reassigned by a user-written function call
   189  	// that was invoked to initialize another global variable (#51913).
   190  	if s.seenMutation {
   191  		if base.Debug.StaticCopy != 0 {
   192  			base.WarnfAt(l.Pos(), "skipping static copy of %v+%v with %v", l, loff, r)
   193  		}
   194  		return false
   195  	}
   196  
   197  	for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) {
   198  		r = r.(*ir.ConvExpr).X
   199  	}
   200  
   201  	switch r.Op() {
   202  	case ir.OMETHEXPR:
   203  		r = r.(*ir.SelectorExpr).FuncName()
   204  		fallthrough
   205  	case ir.ONAME:
   206  		r := r.(*ir.Name)
   207  		if s.staticcopy(l, loff, r, typ) {
   208  			return true
   209  		}
   210  		// We may have skipped past one or more OCONVNOPs, so
   211  		// use conv to ensure r is assignable to l (#13263).
   212  		dst := ir.Node(l)
   213  		if loff != 0 || !types.Identical(typ, l.Type()) {
   214  			dst = ir.NewNameOffsetExpr(base.Pos, l, loff, typ)
   215  		}
   216  		s.append(ir.NewAssignStmt(base.Pos, dst, typecheck.Conv(r, typ)))
   217  		return true
   218  
   219  	case ir.ONIL:
   220  		return true
   221  
   222  	case ir.OLITERAL:
   223  		if ir.IsZero(r) {
   224  			return true
   225  		}
   226  		staticdata.InitConst(l, loff, r, int(typ.Size()))
   227  		return true
   228  
   229  	case ir.OADDR:
   230  		r := r.(*ir.AddrExpr)
   231  		if a, ok := r.X.(*ir.Name); ok && a.Op() == ir.ONAME {
   232  			if a.Class != ir.PEXTERN {
   233  				return false // e.g. local from new(expr)
   234  			}
   235  			staticdata.InitAddr(l, loff, staticdata.GlobalLinksym(a))
   236  			return true
   237  		}
   238  
   239  	case ir.OPTRLIT:
   240  		r := r.(*ir.AddrExpr)
   241  		switch r.X.Op() {
   242  		case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
   243  			// copy pointer
   244  			staticdata.InitAddr(l, loff, staticdata.GlobalLinksym(s.Temps[r]))
   245  			return true
   246  		}
   247  
   248  	case ir.OSLICELIT:
   249  		r := r.(*ir.CompLitExpr)
   250  		// copy slice
   251  		staticdata.InitSlice(l, loff, staticdata.GlobalLinksym(s.Temps[r]), r.Len)
   252  		return true
   253  
   254  	case ir.OARRAYLIT, ir.OSTRUCTLIT:
   255  		r := r.(*ir.CompLitExpr)
   256  		p := s.Plans[r]
   257  		for i := range p.E {
   258  			e := &p.E[i]
   259  			typ := e.Expr.Type()
   260  			if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
   261  				staticdata.InitConst(l, loff+e.Xoffset, e.Expr, int(typ.Size()))
   262  				continue
   263  			}
   264  			x := e.Expr
   265  			if x.Op() == ir.OMETHEXPR {
   266  				x = x.(*ir.SelectorExpr).FuncName()
   267  			}
   268  			if x.Op() == ir.ONAME && s.staticcopy(l, loff+e.Xoffset, x.(*ir.Name), typ) {
   269  				continue
   270  			}
   271  			// Requires computation, but we're
   272  			// copying someone else's computation.
   273  			ll := ir.NewNameOffsetExpr(base.Pos, l, loff+e.Xoffset, typ)
   274  			rr := ir.NewNameOffsetExpr(base.Pos, orig, e.Xoffset, typ)
   275  			ir.SetPos(rr)
   276  			s.append(ir.NewAssignStmt(base.Pos, ll, rr))
   277  		}
   278  
   279  		return true
   280  	}
   281  
   282  	return false
   283  }
   284  
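        // StaticAssign attempts to initialize the value at offset loff within l's
        // data symbol with the expression r, without running code at program start.
        // Constant parts are written directly into the linker symbol; any residual
        // dynamic parts of composite values are appended to the schedule. It reports
        // whether the assignment was handled; if not, the caller must arrange for it
        // to be executed dynamically.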
   285  func (s *Schedule) StaticAssign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool {
   286  	// If we're building for FIPS, avoid global data relocations
   287  	// by treating all address-of operations as non-static.
   288  	// See ../../../internal/obj/fips.go for more context.
   289  	// We do this even in non-PIE mode to avoid generating
   290  	// static temporaries that would go into SRODATAFIPS
   291  	// but need relocations. We can't handle that in the verification.
   292  	disableGlobalAddrs := base.Ctxt.IsFIPS()
   293  
   294  	if r == nil {
   295  		// No explicit initialization value. Either zero or supplied
   296  		// externally.
   297  		return true
   298  	}
   299  	for r.Op() == ir.OCONVNOP {
   300  		r = r.(*ir.ConvExpr).X
   301  	}
   302  
   303  	assign := func(pos src.XPos, a *ir.Name, aoff int64, v ir.Node) {
   304  		if s.StaticAssign(a, aoff, v, v.Type()) {
   305  			return
   306  		}
   307  		var lhs ir.Node
   308  		if ir.IsBlank(a) {
   309  			// Don't use NameOffsetExpr with blank (#43677).
   310  			lhs = ir.BlankNode
   311  		} else {
   312  			lhs = ir.NewNameOffsetExpr(pos, a, aoff, v.Type())
   313  		}
   314  		s.append(ir.NewAssignStmt(pos, lhs, v))
   315  	}
   316  
   317  	switch r.Op() {
   318  	case ir.ONAME:
   319  		if disableGlobalAddrs {
   320  			return false
   321  		}
   322  		r := r.(*ir.Name)
   323  		return s.staticcopy(l, loff, r, typ)
   324  
   325  	case ir.OMETHEXPR:
   326  		if disableGlobalAddrs {
   327  			return false
   328  		}
   329  		r := r.(*ir.SelectorExpr)
   330  		return s.staticcopy(l, loff, r.FuncName(), typ)
   331  
   332  	case ir.ONIL:
   333  		return true
   334  
   335  	case ir.OLITERAL:
   336  		if ir.IsZero(r) {
   337  			return true
   338  		}
   339  		if disableGlobalAddrs && r.Type().IsString() {
   340  			return false
   341  		}
   342  		staticdata.InitConst(l, loff, r, int(typ.Size()))
   343  		return true
   344  
   345  	case ir.OADDR:
   346  		if disableGlobalAddrs {
   347  			return false
   348  		}
   349  		r := r.(*ir.AddrExpr)
   350  		if name, offset, ok := StaticLoc(r.X); ok && name.Class == ir.PEXTERN {
   351  			staticdata.InitAddrOffset(l, loff, name.Linksym(), offset)
   352  			return true
   353  		}
   354  		fallthrough
   355  
   356  	case ir.OPTRLIT:
   357  		if disableGlobalAddrs {
   358  			return false
   359  		}
   360  		r := r.(*ir.AddrExpr)
   361  		switch r.X.Op() {
   362  		case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
   363  			// Init pointer.
   364  			a := StaticName(r.X.Type())
   365  
   366  			s.Temps[r] = a
   367  			staticdata.InitAddr(l, loff, a.Linksym())
   368  
   369  			// Init underlying literal.
   370  			assign(base.Pos, a, 0, r.X)
   371  			return true
   372  		}
   373  		//dump("not static ptrlit", r);
   374  
   375  	case ir.OSTR2BYTES:
   376  		if disableGlobalAddrs {
   377  			return false
   378  		}
   379  		r := r.(*ir.ConvExpr)
   380  		if l.Class == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
   381  			sval := ir.StringVal(r.X)
   382  			staticdata.InitSliceBytes(l, loff, sval)
   383  			return true
   384  		}
   385  
   386  	case ir.OSLICELIT:
   387  		if disableGlobalAddrs {
   388  			return false
   389  		}
   390  		r := r.(*ir.CompLitExpr)
   391  		s.initplan(r)
   392  		// Init slice.
   393  		ta := types.NewArray(r.Type().Elem(), r.Len)
   394  		ta.SetNoalg(true)
   395  		a := StaticName(ta)
   396  		s.Temps[r] = a
   397  		staticdata.InitSlice(l, loff, a.Linksym(), r.Len)
   398  		// Fall through to init underlying array.
   399  		l = a
   400  		loff = 0
   401  		fallthrough
   402  
   403  	case ir.OARRAYLIT, ir.OSTRUCTLIT:
   404  		r := r.(*ir.CompLitExpr)
   405  		s.initplan(r)
   406  
   407  		p := s.Plans[r]
   408  		for i := range p.E {
   409  			e := &p.E[i]
   410  			if e.Expr.Op() == ir.OLITERAL && !disableGlobalAddrs || e.Expr.Op() == ir.ONIL {
   411  				staticdata.InitConst(l, loff+e.Xoffset, e.Expr, int(e.Expr.Type().Size()))
   412  				continue
   413  			}
   414  			ir.SetPos(e.Expr)
   415  			assign(base.Pos, l, loff+e.Xoffset, e.Expr)
   416  		}
   417  
   418  		return true
   419  
   420  	case ir.OMAPLIT:
   421  		break
   422  
   423  	case ir.OCLOSURE:
   424  		if disableGlobalAddrs {
   425  			return false
   426  		}
   427  		r := r.(*ir.ClosureExpr)
   428  		if !r.Func.IsClosure() {
   429  			if base.Debug.Closure > 0 {
   430  				base.WarnfAt(r.Pos(), "closure converted to global")
   431  			}
   432  			// Closures with no captured variables are globals,
   433  			// so the assignment can be done at link time.
   434  			// TODO if roff != 0 { panic }
   435  			staticdata.InitAddr(l, loff, staticdata.FuncLinksym(r.Func.Nname))
   436  			return true
   437  		}
   438  		ir.ClosureDebugRuntimeCheck(r)
   439  
   440  	case ir.OCONVIFACE:
   441  		// This logic is mirrored in isStaticCompositeLiteral.
   442  		// If you change something here, change it there, and vice versa.
   443  
   444  		if disableGlobalAddrs {
   445  			return false
   446  		}
   447  
   448  		// Determine the underlying concrete type and value we are converting from.
   449  		r := r.(*ir.ConvExpr)
   450  		val := ir.Node(r)
   451  		for val.Op() == ir.OCONVIFACE {
   452  			val = val.(*ir.ConvExpr).X
   453  		}
   454  
   455  		if val.Type().IsInterface() {
   456  			// val is an interface type.
   457  			// If val is nil, we can statically initialize l;
   458  		// both words are zero and so there is no work to do; report success.
   459  			// If val is non-nil, we have no concrete type to record,
   460  			// and we won't be able to statically initialize its value, so report failure.
   461  			return val.Op() == ir.ONIL
   462  		}
   463  
   464  		if val.Type().HasShape() {
   465  			// See comment in cmd/compile/internal/walk/convert.go:walkConvInterface
   466  			return false
   467  		}
   468  
   469  		reflectdata.MarkTypeUsedInInterface(val.Type(), l.Linksym())
   470  
   471  		var itab *ir.AddrExpr
   472  		if typ.IsEmptyInterface() {
   473  			itab = reflectdata.TypePtrAt(base.Pos, val.Type())
   474  		} else {
   475  			itab = reflectdata.ITabAddrAt(base.Pos, val.Type(), typ)
   476  		}
   477  
   478  		// Write the interface value into l at offset loff: the type or itab word first, then the data word.
   479  
   480  		// Emit itab, advance offset.
   481  		staticdata.InitAddr(l, loff, itab.X.(*ir.LinksymOffsetExpr).Linksym)
   482  
   483  		// Emit data.
   484  		if types.IsDirectIface(val.Type()) {
   485  			if val.Op() == ir.ONIL {
   486  				// Nil is zero, nothing to do.
   487  				return true
   488  			}
   489  			// Copy val directly into n.
   490  			ir.SetPos(val)
   491  			assign(base.Pos, l, loff+int64(types.PtrSize), val)
   492  		} else {
   493  			// Construct temp to hold val, write pointer to temp into n.
   494  			a := StaticName(val.Type())
   495  			s.Temps[val] = a
   496  			assign(base.Pos, a, 0, val)
   497  			staticdata.InitAddr(l, loff+int64(types.PtrSize), a.Linksym())
   498  		}
   499  
   500  		return true
   501  
   502  	case ir.OINLCALL:
   503  		if disableGlobalAddrs {
   504  			return false
   505  		}
   506  		r := r.(*ir.InlinedCallExpr)
   507  		return s.staticAssignInlinedCall(l, loff, r, typ)
   508  	}
   509  
   510  	if base.Flag.Percent != 0 {
   511  		ir.Dump("not static", r)
   512  	}
   513  	return false
   514  }
   515  
   516  func (s *Schedule) initplan(n ir.Node) {
   517  	if s.Plans[n] != nil {
   518  		return
   519  	}
   520  	p := new(Plan)
   521  	s.Plans[n] = p
   522  	switch n.Op() {
   523  	default:
   524  		base.Fatalf("initplan")
   525  
   526  	case ir.OARRAYLIT, ir.OSLICELIT:
   527  		n := n.(*ir.CompLitExpr)
   528  		var k int64
   529  		for _, a := range n.List {
   530  			if a.Op() == ir.OKEY {
   531  				kv := a.(*ir.KeyExpr)
   532  				k = typecheck.IndexConst(kv.Key)
   533  				a = kv.Value
   534  			}
   535  			s.addvalue(p, k*n.Type().Elem().Size(), a)
   536  			k++
   537  		}
   538  
   539  	case ir.OSTRUCTLIT:
   540  		n := n.(*ir.CompLitExpr)
   541  		for _, a := range n.List {
   542  			if a.Op() != ir.OSTRUCTKEY {
   543  				base.Fatalf("initplan structlit")
   544  			}
   545  			a := a.(*ir.StructKeyExpr)
   546  			if a.Sym().IsBlank() {
   547  				continue
   548  			}
   549  			s.addvalue(p, a.Field.Offset, a.Value)
   550  		}
   551  
   552  	case ir.OMAPLIT:
   553  		n := n.(*ir.CompLitExpr)
   554  		for _, a := range n.List {
   555  			if a.Op() != ir.OKEY {
   556  				base.Fatalf("initplan maplit")
   557  			}
   558  			a := a.(*ir.KeyExpr)
   559  			s.addvalue(p, -1, a.Value)
   560  		}
   561  	}
   562  }
   563  
   564  func (s *Schedule) addvalue(p *Plan, xoffset int64, n ir.Node) {
   565  	// special case: zero can be dropped entirely
   566  	if ir.IsZero(n) {
   567  		return
   568  	}
   569  
   570  	// special case: inline struct and array (not slice) literals
   571  	if isvaluelit(n) {
   572  		s.initplan(n)
   573  		q := s.Plans[n]
   574  		for _, qe := range q.E {
   575  			// qe is a copy; we are not modifying entries in q.E
   576  			qe.Xoffset += xoffset
   577  			p.E = append(p.E, qe)
   578  		}
   579  		return
   580  	}
   581  
   582  	// add to plan
   583  	p.E = append(p.E, Entry{Xoffset: xoffset, Expr: n})
   584  }
   585  
   586  func (s *Schedule) staticAssignInlinedCall(l *ir.Name, loff int64, call *ir.InlinedCallExpr, typ *types.Type) bool {
   587  	if base.Debug.InlStaticInit == 0 {
   588  		return false
   589  	}
   590  
   591  	// Handle the special case of an inlined call of
   592  	// a function body with a single return statement,
   593  	// which turns into a single assignment plus a goto.
   594  	//
   595  	// For example code like this:
   596  	//
   597  	//	type T struct{ x int }
   598  	//	func F(x int) *T { return &T{x} }
   599  	//	var Global = F(400)
   600  	//
   601  	// turns into IR like this:
   602  	//
   603  	// 	INLCALL-init
   604  	// 	.   AS2-init
   605  	// 	.   .   DCL # x.go:18:13
   606  	// 	.   .   .   NAME-p.x Class:PAUTO Offset:0 InlFormal OnStack Used int tc(1) # x.go:14:9,x.go:18:13
   607  	// 	.   AS2 Def tc(1) # x.go:18:13
   608  	// 	.   AS2-Lhs
   609  	// 	.   .   NAME-p.x Class:PAUTO Offset:0 InlFormal OnStack Used int tc(1) # x.go:14:9,x.go:18:13
   610  	// 	.   AS2-Rhs
   611  	// 	.   .   LITERAL-400 int tc(1) # x.go:18:14
   612  	// 	.   INLMARK Index:1 # +x.go:18:13
   613  	// 	INLCALL PTR-*T tc(1) # x.go:18:13
   614  	// 	INLCALL-Body
   615  	// 	.   BLOCK tc(1) # x.go:18:13
   616  	// 	.   BLOCK-List
   617  	// 	.   .   DCL tc(1) # x.go:18:13
   618  	// 	.   .   .   NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
   619  	// 	.   .   AS2 tc(1) # x.go:18:13
   620  	// 	.   .   AS2-Lhs
   621  	// 	.   .   .   NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
   622  	// 	.   .   AS2-Rhs
   623  	// 	.   .   .   INLINED RETURN ARGUMENT HERE
   624  	// 	.   .   GOTO p..i1 tc(1) # x.go:18:13
   625  	// 	.   LABEL p..i1 # x.go:18:13
   626  	// 	INLCALL-ReturnVars
   627  	// 	.   NAME-p.~R0 Class:PAUTO Offset:0 OnStack Used PTR-*T tc(1) # x.go:18:13
   628  	//
   629  	// If the init values are side-effect-free and each either only
   630  	// appears once in the function body or is safely repeatable,
   631  	// then we inline the value expressions into the return argument
   632  	// and then call StaticAssign to handle that copy.
   633  	//
   634  	// This handles simple cases like
   635  	//
   636  	//	var myError = errors.New("mine")
   637  	//
   638  	// where errors.New is
   639  	//
   640  	//	func New(text string) error {
   641  	//		return &errorString{text}
   642  	//	}
   643  	//
   644  	// We could make things more sophisticated but this kind of initializer
   645  	// is the most important case for us to get right.
   646  
   647  	init := call.Init()
   648  	if len(init) != 2 || init[0].Op() != ir.OAS2 || init[1].Op() != ir.OINLMARK {
   649  		return false
   650  	}
   651  	as2init := init[0].(*ir.AssignListStmt)
   652  
   653  	if len(call.Body) != 2 || call.Body[0].Op() != ir.OBLOCK || call.Body[1].Op() != ir.OLABEL {
   654  		return false
   655  	}
   656  	label := call.Body[1].(*ir.LabelStmt).Label
   657  	block := call.Body[0].(*ir.BlockStmt)
   658  	list := block.List
   659  	if len(list) != 3 ||
   660  		list[0].Op() != ir.ODCL ||
   661  		list[1].Op() != ir.OAS2 ||
   662  		list[2].Op() != ir.OGOTO ||
   663  		list[2].(*ir.BranchStmt).Label != label {
   664  		return false
   665  	}
   666  	dcl := list[0].(*ir.Decl)
   667  	as2body := list[1].(*ir.AssignListStmt)
   668  	if len(as2body.Lhs) != 1 || as2body.Lhs[0] != dcl.X {
   669  		return false
   670  	}
   671  
   672  	// Can't remove the parameter variables if an address is taken.
   673  	for _, v := range as2init.Lhs {
   674  		if v.(*ir.Name).Addrtaken() {
   675  			return false
   676  		}
   677  	}
   678  	// Can't move the computation of the args if they have side effects.
   679  	for _, r := range as2init.Rhs {
   680  		if AnySideEffects(r) {
   681  			return false
   682  		}
   683  	}
   684  
   685  	// Can only substitute arg for param if param is used
   686  	// at most once or is repeatable.
   687  	count := make(map[*ir.Name]int)
   688  	for _, x := range as2init.Lhs {
   689  		count[x.(*ir.Name)] = 0
   690  	}
   691  
   692  	hasClosure := false
   693  	ir.Visit(as2body.Rhs[0], func(n ir.Node) {
   694  		if name, ok := n.(*ir.Name); ok {
   695  			if c, ok := count[name]; ok {
   696  				count[name] = c + 1
   697  			}
   698  		}
   699  		if clo, ok := n.(*ir.ClosureExpr); ok {
   700  			hasClosure = hasClosure || clo.Func.IsClosure()
   701  		}
   702  	})
   703  
   704  	// If there's a closure, it has captured the param,
   705  	// so we can't substitute arg for param.
   706  	if hasClosure {
   707  		return false
   708  	}
   709  
   710  	for name, c := range count {
   711  		if c > 1 {
   712  			// Check whether corresponding initializer can be repeated.
   713  			// Something like 1 can be; make(chan int) or &T{} cannot,
   714  			// because they need to evaluate to the same result in each use.
   715  			for i, n := range as2init.Lhs {
   716  				if n == name && !canRepeat(as2init.Rhs[i]) {
   717  					return false
   718  				}
   719  			}
   720  		}
   721  	}
   722  
   723  	// Possible static init.
   724  	// Build tree with args substituted for params and try it.
   725  	args := make(map[*ir.Name]ir.Node)
   726  	for i, v := range as2init.Lhs {
   727  		if ir.IsBlank(v) {
   728  			continue
   729  		}
   730  		args[v.(*ir.Name)] = as2init.Rhs[i]
   731  	}
   732  	r, ok := subst(as2body.Rhs[0], args)
   733  	if !ok {
   734  		return false
   735  	}
   736  	ok = s.StaticAssign(l, loff, r, typ)
   737  
   738  	if ok && base.Flag.Percent != 0 {
   739  		ir.Dump("static inlined-LEFT", l)
   740  		ir.Dump("static inlined-ORIG", call)
   741  		ir.Dump("static inlined-RIGHT", r)
   742  	}
   743  	return ok
   744  }
   745  
   746  // From here down is the walk analysis
   747  // of composite literals.
   748  // Most of the work is to generate
   749  // data statements for the constant
   750  // part of the composite literal.
   751  
   752  var statuniqgen int // name generator for static temps
   753  
   754  // StaticName returns a name backed by a (writable) static data symbol.
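        // The symbol is marked static (not visible outside the object file) and the
        // name is added to the package's list of externs.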
   755  func StaticName(t *types.Type) *ir.Name {
   756  	// Don't use LookupNum; it interns the resulting string, but these are all unique.
   757  	sym := typecheck.Lookup(fmt.Sprintf("%s%d", obj.StaticNamePrefix, statuniqgen))
   758  	statuniqgen++
   759  
   760  	n := ir.NewNameAt(base.Pos, sym, t)
   761  	sym.Def = n
   762  
   763  	n.Class = ir.PEXTERN
   764  	typecheck.Target.Externs = append(typecheck.Target.Externs, n)
   765  
   766  	n.Linksym().Set(obj.AttrStatic, true)
   767  	return n
   768  }
   769  
   770  // StaticLoc returns the static location of n as a base name and byte offset; ok reports whether n has one.
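        // For example, for a package-level struct variable x, StaticLoc of x.f
        // yields x plus f's field offset, and StaticLoc of x.a[3] yields x plus the
        // element offset; indexing a slice or using a non-constant index has no
        // static location. (x, f, and a are illustrative names.)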
   771  func StaticLoc(n ir.Node) (name *ir.Name, offset int64, ok bool) {
   772  	if n == nil {
   773  		return nil, 0, false
   774  	}
   775  
   776  	switch n.Op() {
   777  	case ir.ONAME:
   778  		n := n.(*ir.Name)
   779  		return n, 0, true
   780  
   781  	case ir.OMETHEXPR:
   782  		n := n.(*ir.SelectorExpr)
   783  		return StaticLoc(n.FuncName())
   784  
   785  	case ir.ODOT:
   786  		n := n.(*ir.SelectorExpr)
   787  		if name, offset, ok = StaticLoc(n.X); !ok {
   788  			break
   789  		}
   790  		offset += n.Offset()
   791  		return name, offset, true
   792  
   793  	case ir.OINDEX:
   794  		n := n.(*ir.IndexExpr)
   795  		if n.X.Type().IsSlice() {
   796  			break
   797  		}
   798  		if name, offset, ok = StaticLoc(n.X); !ok {
   799  			break
   800  		}
   801  		l := getlit(n.Index)
   802  		if l < 0 {
   803  			break
   804  		}
   805  
   806  		// Check for overflow.
   807  		if n.Type().Size() != 0 && types.MaxWidth/n.Type().Size() <= int64(l) {
   808  			break
   809  		}
   810  		offset += int64(l) * n.Type().Size()
   811  		return name, offset, true
   812  	}
   813  
   814  	return nil, 0, false
   815  }
   816  
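        // isSideEffect reports whether the single node n can itself have an
        // observable side effect; its operands are visited separately by ir.Any in
        // AnySideEffects and canRepeat.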
   817  func isSideEffect(n ir.Node) bool {
   818  	switch n.Op() {
   819  	// Assume side effects unless we know otherwise.
   820  	default:
   821  		return true
   822  
   823  	// No side effects here (arguments are checked separately).
   824  	case ir.ONAME,
   825  		ir.ONONAME,
   826  		ir.OTYPE,
   827  		ir.OLITERAL,
   828  		ir.ONIL,
   829  		ir.OADD,
   830  		ir.OSUB,
   831  		ir.OOR,
   832  		ir.OXOR,
   833  		ir.OADDSTR,
   834  		ir.OADDR,
   835  		ir.OANDAND,
   836  		ir.OBYTES2STR,
   837  		ir.ORUNES2STR,
   838  		ir.OSTR2BYTES,
   839  		ir.OSTR2RUNES,
   840  		ir.OCAP,
   841  		ir.OCOMPLIT,
   842  		ir.OMAPLIT,
   843  		ir.OSTRUCTLIT,
   844  		ir.OARRAYLIT,
   845  		ir.OSLICELIT,
   846  		ir.OPTRLIT,
   847  		ir.OCONV,
   848  		ir.OCONVIFACE,
   849  		ir.OCONVNOP,
   850  		ir.ODOT,
   851  		ir.OEQ,
   852  		ir.ONE,
   853  		ir.OLT,
   854  		ir.OLE,
   855  		ir.OGT,
   856  		ir.OGE,
   857  		ir.OKEY,
   858  		ir.OSTRUCTKEY,
   859  		ir.OLEN,
   860  		ir.OMUL,
   861  		ir.OLSH,
   862  		ir.ORSH,
   863  		ir.OAND,
   864  		ir.OANDNOT,
   865  		ir.ONEW,
   866  		ir.ONOT,
   867  		ir.OBITNOT,
   868  		ir.OPLUS,
   869  		ir.ONEG,
   870  		ir.OOROR,
   871  		ir.OPAREN,
   872  		ir.ORUNESTR,
   873  		ir.OREAL,
   874  		ir.OIMAG,
   875  		ir.OCOMPLEX:
   876  		return false
   877  
   878  	// Only possible side effect is division by zero.
   879  	case ir.ODIV, ir.OMOD:
   880  		n := n.(*ir.BinaryExpr)
   881  		if n.Y.Op() != ir.OLITERAL || constant.Sign(n.Y.Val()) == 0 {
   882  			return true
   883  		}
   884  
   885  	// Only possible side effect is panic on invalid size,
   886  	// but many makechan and makemap use size zero, which is definitely OK.
   887  	case ir.OMAKECHAN, ir.OMAKEMAP:
   888  		n := n.(*ir.MakeExpr)
   889  		if !ir.IsConst(n.Len, constant.Int) || constant.Sign(n.Len.Val()) != 0 {
   890  			return true
   891  		}
   892  
   893  	// Only possible side effect is panic on invalid size.
   894  	// TODO(rsc): Merge with previous case (probably breaks toolstash -cmp).
   895  	case ir.OMAKESLICE, ir.OMAKESLICECOPY:
   896  		return true
   897  	}
   898  	return false
   899  }
   900  
   901  // AnySideEffects reports whether n contains any operations that could have observable side effects.
   902  func AnySideEffects(n ir.Node) bool {
   903  	return ir.Any(n, isSideEffect)
   904  }
   905  
   906  // mayModifyPkgVar reports whether expression n may modify any
   907  // package-scope variables declared within the current package.
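        //
        // Calls to non-intrinsic functions are conservatively assumed to modify
        // package-scope state, as are append/clear/copy and assignments whose
        // destination resolves to a package-level variable of the current package.
        // Assignments to locals, to globals of other packages, or through pointers
        // held in locals are treated as safe.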
   908  func mayModifyPkgVar(n ir.Node) bool {
   909  	// safeLHS reports whether the assigned-to variable lhs is either a
   910  	// local variable or a global from another package.
   911  	safeLHS := func(lhs ir.Node) bool {
   912  		outer := ir.OuterValue(lhs)
   913  		// "*p = ..." should be safe if p is a local variable.
   914  		// TODO: Should ir.OuterValue handle this?
   915  		for outer.Op() == ir.ODEREF {
   916  			outer = outer.(*ir.StarExpr).X
   917  		}
   918  		v, ok := outer.(*ir.Name)
   919  		return ok && v.Op() == ir.ONAME && !(v.Class == ir.PEXTERN && v.Sym().Pkg == types.LocalPkg)
   920  	}
   921  
   922  	return ir.Any(n, func(n ir.Node) bool {
   923  		switch n.Op() {
   924  		case ir.OCALLFUNC, ir.OCALLINTER:
   925  			return !ir.IsFuncPCIntrinsic(n.(*ir.CallExpr))
   926  
   927  		case ir.OAPPEND, ir.OCLEAR, ir.OCOPY:
   928  			return true // could mutate a global array
   929  
   930  		case ir.OASOP:
   931  			n := n.(*ir.AssignOpStmt)
   932  			if !safeLHS(n.X) {
   933  				return true
   934  			}
   935  
   936  		case ir.OAS:
   937  			n := n.(*ir.AssignStmt)
   938  			if !safeLHS(n.X) {
   939  				return true
   940  			}
   941  
   942  		case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
   943  			n := n.(*ir.AssignListStmt)
   944  			for _, lhs := range n.Lhs {
   945  				if !safeLHS(lhs) {
   946  					return true
   947  				}
   948  			}
   949  		}
   950  
   951  		return false
   952  	})
   953  }
   954  
   955  // canRepeat reports whether executing n multiple times has the same effect as
   956  // assigning n to a single variable and using that variable multiple times.
   957  func canRepeat(n ir.Node) bool {
   958  	bad := func(n ir.Node) bool {
   959  		if isSideEffect(n) {
   960  			return true
   961  		}
   962  		switch n.Op() {
   963  		case ir.OMAKECHAN,
   964  			ir.OMAKEMAP,
   965  			ir.OMAKESLICE,
   966  			ir.OMAKESLICECOPY,
   967  			ir.OMAPLIT,
   968  			ir.ONEW,
   969  			ir.OPTRLIT,
   970  			ir.OSLICELIT,
   971  			ir.OSTR2BYTES,
   972  			ir.OSTR2RUNES:
   973  			return true
   974  		}
   975  		return false
   976  	}
   977  	return !ir.Any(n, bad)
   978  }
   979  
   980  func getlit(lit ir.Node) int {
   981  	if ir.IsSmallIntConst(lit) {
   982  		return int(ir.Int64Val(lit))
   983  	}
   984  	return -1
   985  }
   986  
   987  func isvaluelit(n ir.Node) bool {
   988  	return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT
   989  }
   990  
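        // subst returns a copy of n in which each name that appears in m has been
        // replaced by a deep copy of the corresponding expression, folding constant
        // conversions and string concatenations along the way. ok is false if a
        // constant conversion could not be evaluated at compile time.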
   991  func subst(n ir.Node, m map[*ir.Name]ir.Node) (ir.Node, bool) {
   992  	valid := true
   993  	var edit func(ir.Node) ir.Node
   994  	edit = func(x ir.Node) ir.Node {
   995  		switch x.Op() {
   996  		case ir.ONAME:
   997  			x := x.(*ir.Name)
   998  			if v, ok := m[x]; ok {
   999  				return ir.DeepCopy(v.Pos(), v)
  1000  			}
  1001  			return x
  1002  		case ir.ONONAME, ir.OLITERAL, ir.ONIL, ir.OTYPE:
  1003  			return x
  1004  		}
  1005  		x = ir.Copy(x)
  1006  		ir.EditChildrenWithHidden(x, edit)
  1007  
  1008  		// TODO: handle more operations, see details discussion in go.dev/cl/466277.
  1009  		switch x.Op() {
  1010  		case ir.OCONV:
  1011  			x := x.(*ir.ConvExpr)
  1012  			if x.X.Op() == ir.OLITERAL {
  1013  				if x, ok := truncate(x.X, x.Type()); ok {
  1014  					return x
  1015  				}
  1016  				valid = false
  1017  				return x
  1018  			}
  1019  		case ir.OADDSTR:
  1020  			return addStr(x.(*ir.AddStringExpr))
  1021  		}
  1022  		return x
  1023  	}
  1024  	n = edit(n)
  1025  	return n, valid
  1026  }
  1027  
  1028  // truncate returns the result of force converting c to type t,
  1029  // truncating its value as needed, like a conversion of a variable.
  1030  // If the conversion is too difficult, truncate returns nil, false.
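        //
        // For example, truncating the constant 300 to uint8 yields 44 (300&0xFF),
        // and truncating 200 to int8 yields -56 (the low 8 bits, sign-extended).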
  1031  func truncate(c ir.Node, t *types.Type) (ir.Node, bool) {
  1032  	ct := c.Type()
  1033  	cv := c.Val()
  1034  	if ct.Kind() != t.Kind() {
  1035  		switch {
  1036  		default:
  1037  			// Note: float -> float/integer and complex -> complex are valid but subtle.
  1038  			// For example a float32(float64 1e300) evaluates to +Inf at runtime
  1039  			// and the compiler doesn't have any concept of +Inf, so that would
  1040  			// have to be left for runtime code evaluation.
  1041  			// For now, reject these conversions.
  1042  			return nil, false
  1043  
  1044  		case ct.IsInteger() && t.IsInteger():
  1045  			// truncate or sign extend
  1046  			bits := t.Size() * 8
  1047  			cv = constant.BinaryOp(cv, token.AND, constant.MakeUint64(1<<bits-1))
  1048  			if t.IsSigned() && constant.Compare(cv, token.GEQ, constant.MakeUint64(1<<(bits-1))) {
  1049  				cv = constant.BinaryOp(cv, token.OR, constant.MakeInt64(-1<<(bits-1)))
  1050  			}
  1051  		}
  1052  	}
  1053  	c = ir.NewConstExpr(cv, c)
  1054  	c.SetType(t)
  1055  	return c, true
  1056  }
  1057  
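        // addStr folds adjacent constant operands of an OADDSTR expression, so that,
        // for example, "a" + x + "b" + "c" becomes "a" + x + "bc" (where x is any
        // non-constant string expression); a fully constant concatenation collapses
        // to a single string literal.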
  1058  func addStr(n *ir.AddStringExpr) ir.Node {
  1059  	// Merge adjacent constants in the argument list.
  1060  	s := n.List
  1061  	need := 0
  1062  	for i := 0; i < len(s); i++ {
  1063  		if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
  1064  			// Can't merge s[i] into s[i-1]; need a slot in the list.
  1065  			need++
  1066  		}
  1067  	}
  1068  	if need == len(s) {
  1069  		return n
  1070  	}
  1071  	if need == 1 {
  1072  		var strs []string
  1073  		for _, c := range s {
  1074  			strs = append(strs, ir.StringVal(c))
  1075  		}
  1076  		return ir.NewConstExpr(constant.MakeString(strings.Join(strs, "")), n)
  1077  	}
  1078  	newList := make([]ir.Node, 0, need)
  1079  	for i := 0; i < len(s); i++ {
  1080  		if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
  1081  			// merge from i up to but not including i2
  1082  			var strs []string
  1083  			i2 := i
  1084  			for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
  1085  				strs = append(strs, ir.StringVal(s[i2]))
  1086  				i2++
  1087  			}
  1088  
  1089  			newList = append(newList, ir.NewConstExpr(constant.MakeString(strings.Join(strs, "")), s[i]))
  1090  			i = i2 - 1
  1091  		} else {
  1092  			newList = append(newList, s[i])
  1093  		}
  1094  	}
  1095  
  1096  	nn := ir.Copy(n).(*ir.AddStringExpr)
  1097  	nn.List = newList
  1098  	return nn
  1099  }
  1100  
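        // wrapGlobalMapInitSizeThreshold is the minimum number of IR nodes in a map
        // initializer's right-hand side for outlining it to be considered worthwhile
        // (see tryWrapGlobalInit).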
  1101  const wrapGlobalMapInitSizeThreshold = 20
  1102  
  1103  // tryWrapGlobalInit returns a new outlined function to contain global
  1104  // initializer statement n, if possible and worthwhile. Otherwise, it
  1105  // returns nil.
  1106  //
  1107  // Currently, it outlines map assignment statements with large,
  1108  // side-effect-free RHS expressions.
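        //
        // For example (illustrative), a declaration such as
        //
        //	var colors = map[string]int{"red": 1, "green": 2 /* ...many entries... */}
        //
        // is rewritten so that the map is built inside an outlined map.init.N
        // function, which the package init function then calls.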
  1109  func tryWrapGlobalInit(n ir.Node) *ir.Func {
  1110  	// Look for "X = ..." where X has map type.
  1111  	// FIXME: might also be worth trying to look for cases where
  1112  	// the LHS is of interface type but RHS is map type.
  1113  	if n.Op() != ir.OAS {
  1114  		return nil
  1115  	}
  1116  	as := n.(*ir.AssignStmt)
  1117  	if ir.IsBlank(as.X) || as.X.Op() != ir.ONAME {
  1118  		return nil
  1119  	}
  1120  	nm := as.X.(*ir.Name)
  1121  	if !nm.Type().IsMap() {
  1122  		return nil
  1123  	}
  1124  
  1125  	// Determine size of RHS.
  1126  	rsiz := 0
  1127  	ir.Any(as.Y, func(n ir.Node) bool {
  1128  		rsiz++
  1129  		return false
  1130  	})
  1131  	if base.Debug.WrapGlobalMapDbg > 0 {
  1132  		fmt.Fprintf(os.Stderr, "=-= mapassign %s %v rhs size %d\n",
  1133  			base.Ctxt.Pkgpath, n, rsiz)
  1134  	}
  1135  
  1136  	// Reject smaller candidates if not in stress mode.
  1137  	if rsiz < wrapGlobalMapInitSizeThreshold && base.Debug.WrapGlobalMapCtl != 2 {
  1138  		if base.Debug.WrapGlobalMapDbg > 1 {
  1139  			fmt.Fprintf(os.Stderr, "=-= skipping %v size too small at %d\n",
  1140  				nm, rsiz)
  1141  		}
  1142  		return nil
  1143  	}
  1144  
  1145  	// Reject right hand sides with side effects.
  1146  	if AnySideEffects(as.Y) {
  1147  		if base.Debug.WrapGlobalMapDbg > 0 {
  1148  			fmt.Fprintf(os.Stderr, "=-= rejected %v due to side effects\n", nm)
  1149  		}
  1150  		return nil
  1151  	}
  1152  
  1153  	if base.Debug.WrapGlobalMapDbg > 1 {
  1154  		fmt.Fprintf(os.Stderr, "=-= committed for: %+v\n", n)
  1155  	}
  1156  
  1157  	// Create a new function that will (eventually) have this form:
  1158  	//
  1159  	//	func map.init.%d() {
  1160  	//		globmapvar = <map initialization>
  1161  	//	}
  1162  	//
  1163  	// Note: cmd/link expects the function name to contain "map.init".
  1164  	minitsym := typecheck.LookupNum("map.init.", mapinitgen)
  1165  	mapinitgen++
  1166  
  1167  	fn := ir.NewFunc(n.Pos(), n.Pos(), minitsym, types.NewSignature(nil, nil, nil))
  1168  	fn.SetInlinabilityChecked(true) // suppress inlining (which would defeat the point)
  1169  	typecheck.DeclFunc(fn)
  1170  	if base.Debug.WrapGlobalMapDbg > 0 {
  1171  		fmt.Fprintf(os.Stderr, "=-= generated func is %v\n", fn)
  1172  	}
  1173  
  1174  	// NB: we're relying on this phase being run before inlining;
  1175  	// if for some reason we need to move it after inlining, we'll
  1176  	// need code here that relocates or duplicates inline temps.
  1177  
  1178  	// Insert assignment into function body; mark body finished.
  1179  	fn.Body = []ir.Node{as}
  1180  	typecheck.FinishFuncBody()
  1181  
  1182  	if base.Debug.WrapGlobalMapDbg > 1 {
  1183  		fmt.Fprintf(os.Stderr, "=-= mapvar is %v\n", nm)
  1184  		fmt.Fprintf(os.Stderr, "=-= newfunc is %+v\n", fn)
  1185  	}
  1186  
  1187  	recordFuncForVar(nm, fn)
  1188  
  1189  	return fn
  1190  }
  1191  
  1192  // mapinitgen is a counter used to uniquify compiler-generated
  1193  // map init functions.
  1194  var mapinitgen int
  1195  
  1196  // AddKeepRelocations adds a dummy "R_KEEP" relocation from each
  1197  // global map variable V to its associated outlined init function.
  1198  // These relocations ensure that if the map var itself is determined to
  1199  // be reachable at link time, we also mark the init function as
  1200  // reachable.
  1201  func AddKeepRelocations() {
  1202  	if varToMapInit == nil {
  1203  		return
  1204  	}
  1205  	for k, v := range varToMapInit {
  1206  		// Add R_KEEP relocation from map to init function.
  1207  		fs := v.Linksym()
  1208  		if fs == nil {
  1209  			base.Fatalf("bad: func %v has no linksym", v)
  1210  		}
  1211  		vs := k.Linksym()
  1212  		if vs == nil {
  1213  			base.Fatalf("bad: mapvar %v has no linksym", k)
  1214  		}
  1215  		vs.AddRel(base.Ctxt, obj.Reloc{Type: objabi.R_KEEP, Sym: fs})
  1216  		if base.Debug.WrapGlobalMapDbg > 1 {
  1217  			fmt.Fprintf(os.Stderr, "=-= add R_KEEP relo from %s to %s\n",
  1218  				vs.Name, fs.Name)
  1219  		}
  1220  	}
  1221  	varToMapInit = nil
  1222  }
  1223  
  1224  // OutlineMapInits replaces global map initializers with outlined
  1225  // calls to separate "map init" functions (where possible and
  1226  // profitable), to facilitate better dead-code elimination by the
  1227  // linker.
  1228  func OutlineMapInits(fn *ir.Func) {
  1229  	if base.Debug.WrapGlobalMapCtl == 1 {
  1230  		return
  1231  	}
  1232  
  1233  	outlined := 0
  1234  	for i, stmt := range fn.Body {
  1235  		// Attempt to outline stmt. If successful, replace it with a call
  1236  		// to the returned wrapper function.
  1237  		if wrapperFn := tryWrapGlobalInit(stmt); wrapperFn != nil {
  1238  			ir.WithFunc(fn, func() {
  1239  				fn.Body[i] = typecheck.Call(stmt.Pos(), wrapperFn.Nname, nil, false)
  1240  			})
  1241  			outlined++
  1242  		}
  1243  	}
  1244  
  1245  	if base.Debug.WrapGlobalMapDbg > 1 {
  1246  		fmt.Fprintf(os.Stderr, "=-= outlined %v map initializations\n", outlined)
  1247  	}
  1248  }
  1249  
  1250  const maxInitStatements = 1000
  1251  
  1252  // SplitLargeInit breaks up a large "init" function into smaller chunks to avoid slow compilation.
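        // For example, an init body of 2500 statements becomes three calls to
        // generated functions init.var.0, init.var.1, and init.var.2, each holding
        // at most maxInitStatements statements.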
  1253  func SplitLargeInit(fn *ir.Func) {
  1254  	if !fn.IsPackageInit() || len(fn.Body) <= maxInitStatements {
  1255  		return
  1256  	}
  1257  	var calls []ir.Node
  1258  	for chunk := range slices.Chunk(fn.Body, maxInitStatements) {
  1259  		varInitFn := generateVarInitFunc(chunk)
  1260  		ir.WithFunc(fn, func() {
  1261  			calls = append(calls, typecheck.Call(varInitFn.Pos(), varInitFn.Nname, nil, false))
  1262  		})
  1263  	}
  1264  	fn.Body = calls
  1265  }
  1266  
  1267  // CanOptimize reports whether the given fn can be optimized for static assignments.
  1268  func CanOptimize(fn *ir.Func) bool {
  1269  	name := fn.Sym().Name
  1270  	return name == "init" || strings.HasPrefix(name, varInitFuncPrefix)
  1271  }
  1272  
  1273  // varInitGen is a counter used to uniquify compiler-generated functions for initializing variables.
  1274  var varInitGen int
  1275  
  1276  const varInitFuncPrefix = "init.var."
  1277  
  1278  // Create a new function that will (eventually) have this form:
  1279  //
  1280  //	func init.var.%d() {
  1281  //		...
  1282  //	}
  1283  func generateVarInitFunc(body []ir.Node) *ir.Func {
  1284  	pos := base.AutogeneratedPos
  1285  	base.Pos = pos
  1286  
  1287  	sym := typecheck.LookupNum(varInitFuncPrefix, varInitGen)
  1288  	varInitGen++
  1289  
  1290  	fn := ir.NewFunc(pos, pos, sym, types.NewSignature(nil, nil, nil))
  1291  	fn.SetInlinabilityChecked(true) // suppress inlining; otherwise we eventually end up with a giant init function again.
  1292  	fn.SetWrapper(true)             // less disruptive on backtraces.
  1293  	typecheck.DeclFunc(fn)
  1294  
  1295  	fn.Body = body
  1296  	typecheck.FinishFuncBody()
  1297  
  1298  	return fn
  1299  }
  1300  
