Source file src/cmd/compile/internal/walk/expr.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/abi"
    11  	"internal/buildcfg"
    12  	"strings"
    13  
    14  	"cmd/compile/internal/base"
    15  	"cmd/compile/internal/ir"
    16  	"cmd/compile/internal/noder"
    17  	"cmd/compile/internal/objw"
    18  	"cmd/compile/internal/reflectdata"
    19  	"cmd/compile/internal/rttype"
    20  	"cmd/compile/internal/staticdata"
    21  	"cmd/compile/internal/typecheck"
    22  	"cmd/compile/internal/types"
    23  	"cmd/internal/obj"
    24  	"cmd/internal/objabi"
    25  )
    26  
// The result of walkExpr MUST be assigned back to n, e.g.
//
//	n.Left = walkExpr(n.Left, init)
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	if len(n.Init()) != 0 {
		// Walk n's pending init statements and hoist them onto the
		// caller's init list so n itself carries none afterwards.
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	// Remember the current position so it can be restored below;
	// ir.SetPos switches base.Pos to n's position for error messages.
	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit string symbol now to avoid emitting
		// any concurrently during the backend.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	// Restore the position saved before walking.
	base.Pos = lno
	return n
}
    83  
// walkExpr1 dispatches on n.Op to the op-specific walk routine.
// Simple operators are walked in place by recursing into their
// operands; most other operators are rewritten by dedicated
// walk* helpers, which may replace n with a different node.
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// TODO(mdempsky): Just return n; see discussion on CL 38655.
		// Perhaps refactor to use Node.mayBeShared for these instead.
		// If these return early, make sure to still call
		// StringSym for constant strings.
		return n

	case ir.OMETHEXPR:
		// TODO(mdempsky): Do this right after type checking.
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	case ir.OMIN, ir.OMAX:
		n := n.(*ir.CallExpr)
		return walkMinMax(n, init)

	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		if n.Op() == ir.OUNSAFEADD && ir.ShouldCheckPtr(ir.CurFunc, 1) {
			// For unsafe.Add(p, n), just walk "unsafe.Pointer(uintptr(p)+uintptr(n))"
			// for the side effects of validating unsafe.Pointer rules.
			x := typecheck.ConvNop(n.X, types.Types[types.TUINTPTR])
			y := typecheck.Conv(n.Y, types.Types[types.TUINTPTR])
			conv := typecheck.ConvNop(ir.NewBinaryExpr(n.Pos(), ir.OADD, x, y), types.Types[types.TUNSAFEPTR])
			walkExpr(conv, init)
		}
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.OUNSAFESTRING:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeString(n, init)

	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
		n := n.(*ir.UnaryExpr)
		return walkUnsafeData(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTLN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVER:
		return walkRecover(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	// a,b,... = fn()
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	// x, y = <-c
	// order.stmt made sure x is addressable or blank.
	case ir.OAS2RECV:
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	// a,b = m[i]
	case ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARR:
		n := n.(*ir.ConvExpr)
		return walkSliceToArray(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV") // should see inside OAS only
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSTRINGHEADER:
		n := n.(*ir.StringHeaderExpr)
		return walkStringHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.(*ir.AddStringExpr), init, nil)

	case ir.OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLEAR:
		n := n.(*ir.UnaryExpr)
		return walkClear(n, init)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)

	case ir.OMOVE2HEAP:
		n := n.(*ir.MoveToHeapExpr)
		n.Slice = walkExpr(n.Slice, init)
		return n
	}

	// No return! Each case must return (or panic),
	// to avoid confusion about what gets returned
	// in the presence of type assertions.
}
   366  
   367  // walk the whole tree of the body of an
   368  // expression or simple statement.
   369  // the types expressions are calculated.
   370  // compile-time constants are evaluated.
   371  // complex side effects like statements are appended to init.
   372  func walkExprList(s []ir.Node, init *ir.Nodes) {
   373  	for i := range s {
   374  		s[i] = walkExpr(s[i], init)
   375  	}
   376  }
   377  
   378  func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
   379  	for i, n := range s {
   380  		s[i] = cheapExpr(n, init)
   381  		s[i] = walkExpr(s[i], init)
   382  	}
   383  }
   384  
   385  func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
   386  	for i, n := range s {
   387  		s[i] = safeExpr(n, init)
   388  		s[i] = walkExpr(s[i], init)
   389  	}
   390  }
   391  
   392  // return side-effect free and cheap n, appending side effects to init.
   393  // result may not be assignable.
   394  func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
   395  	switch n.Op() {
   396  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   397  		return n
   398  	}
   399  
   400  	return copyExpr(n, n.Type(), init)
   401  }
   402  
// return side effect-free n, appending side effects to init.
// result is assignable if n is.
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	if len(n.Init()) != 0 {
		// Hoist n's pending init statements onto init first.
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// Leaf expressions have no side effects; reuse as is.
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			// Operand was already safe; n can be reused directly.
			return n
		}
		// Rebuild the expression around the safe operand and re-walk it.
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		// Both the indexed expression and the index must be made safe.
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		// Fully static composite literals have no side effects.
		if isStaticCompositeLiteral(n) {
			return n
		}
	}

	// make a copy; must not be used as an lvalue
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}
   474  
   475  func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
   476  	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
   477  	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
   478  	return l
   479  }
   480  
// walkAddString walks a string concatenation expression x.
// If conv is non nil, x is the conv.X field.
// The concatenation is lowered to a call to one of the runtime's
// concatstring*/concatbytes* helpers, optionally with a stack buffer
// for the result when it does not escape.
func walkAddString(x *ir.AddStringExpr, init *ir.Nodes, conv *ir.ConvExpr) ir.Node {
	c := len(x.List)
	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	// Result type: the surrounding conversion's type, if any,
	// otherwise the concatenation's own (string) type.
	typ := x.Type()
	if conv != nil {
		typ = conv.Type()
	}

	// list of string arguments
	var args []ir.Node

	// fn is the runtime helper finally called; fnsmall/fnbig are the
	// name patterns for the few-operand and slice-based variants.
	var fn, fnsmall, fnbig string

	// buf is the optional stack buffer argument (nil pointer if unused).
	buf := typecheck.NodNil()
	switch {
	default:
		base.FatalfAt(x.Pos(), "unexpected type: %v", typ)
	case typ.IsString():
		if x.Esc() == ir.EscNone {
			// Sum the sizes of the constant operands to estimate
			// whether the result can fit in a stack buffer.
			sz := int64(0)
			for _, n1 := range x.List {
				if n1.Op() == ir.OLITERAL {
					sz += int64(len(ir.StringVal(n1)))
				}
			}

			// Don't allocate the buffer if the result won't fit.
			if sz < tmpstringbufsize {
				// Create temporary buffer for result string on stack.
				buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
			}
		}

		args = []ir.Node{buf}
		fnsmall, fnbig = "concatstring%d", "concatstrings"
	case typ.IsSlice() && typ.Elem().IsKind(types.TUINT8): // Optimize []byte(str1+str2+...)
		if conv != nil && conv.Esc() == ir.EscNone {
			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
		}
		args = []ir.Node{buf}
		fnsmall, fnbig = "concatbyte%d", "concatbytes"
	}

	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		// note: order.expr knows this cutoff too.
		fn = fmt.Sprintf(fnsmall, c)

		for _, n2 := range x.List {
			args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
		}
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = fnbig
		t := types.NewSlice(types.Types[types.TSTRING])

		slargs := make([]ir.Node, len(x.List))
		for i, n2 := range x.List {
			slargs[i] = typecheck.Conv(n2, types.Types[types.TSTRING])
		}
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, slargs)
		slice.Prealloc = x.Prealloc
		args = append(args, slice)
		slice.SetEsc(ir.EscNone)
	}

	// Build, typecheck, and walk the runtime call, then force the
	// result to the requested type.
	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	r1.SetType(typ)

	return r1
}
   561  
// hookInfo describes a libfuzzer interception hook: the runtime
// function to invoke, the number of arguments the hooked call takes,
// and the kind those arguments are converted to.
type hookInfo struct {
	paramType   types.Kind
	argsNum     int
	runtimeFunc string
}

// hooks maps a fully qualified function name to the libfuzzer hook
// that observes its arguments (applied in walkCall1 when
// base.Debug.Libfuzzer is set).
var hooks = map[string]hookInfo{
	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
}
   571  
// walkCall walks an OCALLFUNC or OCALLINTER node.
// It handles several special cases (reflect method tracking,
// direct closure calls, FuncPC and EscapeNonString intrinsics,
// deferrangefunc) before delegating to walkCall1.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
		// We expect both interface call reflect.Type.Method and concrete
		// call reflect.(*rtype).Method.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
		directClosureCall(n)
	}

	if ir.IsFuncPCIntrinsic(n) {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
		// it to the address of the function of the ABI fn is defined.
		name := n.Fun.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if n.Type() != types.Types[types.TUINTPTR] {
			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type()) // as expected by typecheck.FuncPC.
		}
		n := ir.FuncPC(n.Pos(), arg, wantABI)
		return walkExpr(n, init)
	}

	if n.Op() == ir.OCALLFUNC {
		fn := ir.StaticCalleeName(n.Fun)
		if fn != nil && fn.Sym().Pkg.Path == "internal/abi" && strings.HasPrefix(fn.Sym().Name, "EscapeNonString[") {
			// internal/abi.EscapeNonString[T] is a compiler intrinsic
			// for the escape analysis to escape its argument based on
			// the type. The call itself is no-op. Just walk the
			// argument.
			ps := fn.Type().Params()
			if len(ps) == 2 && ps[1].Type.IsShape() {
				return walkExpr(n.Args[1], init)
			}
		}
	}

	if name, ok := n.Fun.(*ir.Name); ok {
		sym := name.Sym()
		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
			// Call to runtime.deferrangefunc is being shared with a range-over-func
			// body that might add defers to this frame, so we cannot use open-coded defers
			// and we need to call deferreturn even if we don't see any other explicit defers.
			ir.CurFunc.SetHasDefer(true)
			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
		}
	}

	walkCall1(n, init)
	return n
}
   637  
// walkCall1 walks the function and arguments of a call, spills
// call-containing arguments into temporaries, and (under libfuzzer
// instrumentation) inserts hook calls for recognized functions.
// It is idempotent: a node already walked is left untouched.
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return // already walked
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.Fun.Type().Params()

	n.Fun = walkExpr(n.Fun, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate argument and parameter types match.
		param := params[i]
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable,
		// to prevent that call from clobbering arguments already on the stack.
		if mayCall(arg) {
			// assignment of arg to Temp
			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// replace arg with temp
			args[i] = tmp
		}
	}

	// Under libfuzzer instrumentation, call the matching hook (see the
	// hooks table) with the call's arguments plus a fake PC.
	funSym := n.Fun.Sym()
	if base.Debug.Libfuzzer != 0 && funSym != nil {
		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
			if len(args) != hook.argsNum {
				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
			}
			var hookArgs []ir.Node
			for _, arg := range args {
				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
			}
			hookArgs = append(hookArgs, fakePC(n))
			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
		}
	}
}
   688  
// walkDivMod walks an ODIV or OMOD node.
// Complex division becomes a runtime call; 64-bit div/mod on 32-bit
// targets becomes a runtime call unless the divisor is a non-zero
// constant; everything else is left for the SSA backend.
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// rewrite complex div into function call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		// Convert the complex128 result back to the original type.
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// rewrite 64-bit div and mod on 32-bit architectures.
	// TODO: Remove this code once we can introduce
	// runtime calls late in SSA processing.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by non-zero constants.
			// The SSA backend will handle those.
			// (Zero constants should have been rejected already, but we check just in case.)
			switch et {
			case types.TINT64:
				if ir.Int64Val(n.Y) != 0 {
					return n
				}
			case types.TUINT64:
				if ir.Uint64Val(n.Y) != 0 {
					return n
				}
			}
		}
		// Build call to uint64div, uint64mod, int64div, or int64mod.
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}
   743  
// walkDot walks an ODOT or ODOTPTR node.
// usefield records the field access (NOTE(review): presumably for
// field-tracking diagnostics — see usefield); then the receiver
// expression is walked in place.
func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
	usefield(n)
	n.X = walkExpr(n.X, init)
	return n
}
   750  
// walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	// Set up interface type addresses for back end.
	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
		// Assertion to a concrete type from a non-empty interface:
		// precompute the itab address.
		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
	}
	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
		// This kind of conversion needs a runtime call. Allocate
		// a descriptor for that call.
		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
	}
	return n
}
   765  
   766  // shapeTypeAssertImpossible reports whether a type assertion from src
   767  // to concrete type dst can never succeed because they have
   768  // incompatible shape types.
   769  func shapeTypeAssertImpossible(src ir.Node, dst *types.Type) bool {
   770  	if dst.IsInterface() {
   771  		return false
   772  	}
   773  	srcShape := convIfaceShapeType(src)
   774  	if srcShape == nil {
   775  		return false
   776  	}
   777  	return !types.Identical(srcShape, noder.Shapify(dst, false)) &&
   778  		!types.Identical(srcShape, noder.Shapify(dst, true))
   779  }
   780  
// convIfaceShapeType returns the shape type from which src was
// created via OCONVIFACE, or nil.
func convIfaceShapeType(src ir.Node) *types.Type {
	// Peel off parens and no-op conversions to find the OCONVIFACE,
	// if any, that produced src.
	for {
		switch s := src.(type) {
		case *ir.ParenExpr:
			src = s.X
			continue
		case *ir.ConvExpr:
			if s.Op() == ir.OCONVNOP {
				src = s.X
				continue
			}
			if s.Op() == ir.OCONVIFACE {
				srcType := s.X.Type()
				// Only a non-interface shape type qualifies.
				if srcType != nil && !srcType.IsInterface() && srcType.IsShape() {
					return srcType
				}
				return nil
			}
		}
		break
	}

	// Fall back to the recorded shape-conversion source for names,
	// if that side table is available.
	if name, ok := src.(*ir.Name); ok && shapeConvSources != nil {
		return shapeConvSources[name.Canonical()]
	}
	return nil
}
   810  
// makeTypeAssertDescriptor allocates and fills an internal/abi.TypeAssert
// descriptor for a runtime-assisted assertion to non-empty interface
// type target. canFail records whether the assertion may fail without
// panicking (the two-result form).
func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
	// When converting from an interface to a non-empty interface. Needs a runtime call.
	// Allocate an internal/abi.TypeAssert descriptor for that call.
	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
	typeAssertGen++
	// Write the descriptor fields: cache pointer, target interface
	// type, and the can-fail flag.
	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
	c.Field("Inter").WritePtr(reflectdata.TypeLinksym(target))
	c.Field("CanFail").WriteBool(canFail)
	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
	return lsym
}
   824  
   825  var typeAssertGen int
   826  
// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.RType = walkExpr(n.RType, init)
	n.ITab = walkExpr(n.ITab, init)
	// Convert to non-dynamic if we can.
	if n.RType != nil && n.RType.Op() == ir.OADDR {
		addr := n.RType.(*ir.AddrExpr)
		if addr.X.Op() == ir.OLINKSYMOFFSET {
			// The runtime type is a statically known symbol, so the
			// assertion can be rewritten as an ordinary ODOTTYPE(2)
			// and walked through the static path.
			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
			if n.Op() == ir.ODYNAMICDOTTYPE2 {
				r.SetOp(ir.ODOTTYPE2)
			}
			r.SetType(n.Type())
			r.SetTypecheck(1)
			return walkExpr(r, init)
		}
	}
	return n
}
   847  
// walkIndex walks an OINDEX node, attempting to prove the index is in
// bounds so the bounds check can be elided.
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// save the original node for bounds checking elision.
	// If it was a ODIV/OMOD walk might rewrite it.
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// if range of type cannot exceed static array bound,
	// disable bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		// Indexing through a pointer to array: bound comes from the
		// element (array) type.
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	} else if ir.IsConst(n.X, constant.String) {
		// Constant string: the length is known at compile time.
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	}
	return n
}
   880  
   881  // mapKeyArg returns an expression for key that is suitable to be passed
   882  // as the key argument for runtime map* functions.
   883  // n is the map indexing or delete Node (to provide Pos).
   884  func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
   885  	if fast == mapslow {
   886  		// standard version takes key by reference.
   887  		// orderState.expr made sure key is addressable.
   888  		return typecheck.NodAddr(key)
   889  	}
   890  	if assigned {
   891  		// mapassign does distinguish pointer vs. integer key.
   892  		return key
   893  	}
   894  	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
   895  	switch fast {
   896  	case mapfast32ptr:
   897  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
   898  	case mapfast64ptr:
   899  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
   900  	default:
   901  		// fast version takes key by value.
   902  		return key
   903  	}
   904  }
   905  
// walkIndexMap walks an OINDEXMAP node.
// It replaces m[k] with *map{access1,assign}(maptype, m, &k)
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	t := map_.Type()
	// Pick the fast-path variant for this key type and shape the key
	// argument accordingly.
	fast := mapfast(t)
	key := mapKeyArg(fast, n, n.Index, n.Assigned)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}

	if n.Assigned {
		// Assignment context: *mapassign*(t, m, &k) yields a pointer
		// to the element slot; dereference it.
		mapFn := mapfn(mapassign[fast], t, false)
		call := mkcall1(mapFn, nil, init, args...)
		call.SetType(types.NewPtr(t.Elem()))
		call.MarkNonNil() // mapassign always returns non-nil pointers.
		star := ir.NewStarExpr(base.Pos, call)
		star.SetType(t.Elem())
		star.SetTypecheck(1)
		return star
	}

	// from:
	//   m[i]
	// to:
	//   var, _ = mapaccess2*(t, m, i)
	//   *var
	var mapFn ir.Node
	if t.Elem().Size() > abi.ZeroValSize {
		// Large elements use the _fat variant, which takes a pointer
		// to a shared zero value for misses.
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess2_fat", t, true)
	} else {
		mapFn = mapfn(mapaccess[fast], t, false)
	}
	call := mkcall1(mapFn, mapFn.Type().ResultsTuple(), init, args...)

	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	bool_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
	bool_.SetTypecheck(1)

	// var_, bool_ = mapaccess2*(...); the bool result is discarded.
	r := ir.NewAssignListStmt(base.Pos, ir.OAS2FUNC, []ir.Node{var_, bool_}, []ir.Node{call})
	r.SetTypecheck(1)
	init.Append(walkExpr(r, init))

	star := ir.NewStarExpr(base.Pos, var_)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}
   958  
   959  // walkLogical walks an OANDAND or OOROR node.
   960  func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
   961  	n.X = walkExpr(n.X, init)
   962  
   963  	// cannot put side effects from n.Right on init,
   964  	// because they cannot run before n.Left is checked.
   965  	// save elsewhere and store on the eventual n.Right.
   966  	var ll ir.Nodes
   967  
   968  	n.Y = walkExpr(n.Y, &ll)
   969  	n.Y = ir.InitExpr(ll, n.Y)
   970  	return n
   971  }
   972  
   973  // walkSend walks an OSEND node.
   974  func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
   975  	n1 := n.Value
   976  	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
   977  	n1 = walkExpr(n1, init)
   978  	n1 = typecheck.NodAddr(n1)
   979  	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
   980  }
   981  
   982  // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
   983  func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
   984  	n.X = walkExpr(n.X, init)
   985  	n.Low = walkExpr(n.Low, init)
   986  	if n.Low != nil && ir.IsZero(n.Low) {
   987  		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
   988  		n.Low = nil
   989  	}
   990  	n.High = walkExpr(n.High, init)
   991  	n.Max = walkExpr(n.Max, init)
   992  
   993  	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
   994  		// Reduce x[:] to x.
   995  		if base.Debug.Slice > 0 {
   996  			base.Warn("slice: omit slice operation")
   997  		}
   998  		return n.X
   999  	}
  1000  	return n
  1001  }
  1002  
// walkSliceHeader walks an OSLICEHEADER node.
// It walks the pointer, length, and capacity operands in place;
// the node itself is returned unchanged.
func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	n.Cap = walkExpr(n.Cap, init)
	return n
}
  1010  
// walkStringHeader walks an OSTRINGHEADER node.
// It walks the pointer and length operands in place;
// the node itself is returned unchanged.
func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
	n.Ptr = walkExpr(n.Ptr, init)
	n.Len = walkExpr(n.Len, init)
	return n
}
  1017  
// bounded reports whether the integer expression n is provably in the
// range [0, max). It is conservative: a false result only means the
// bound could not be proved. (The old "return 1 … 0 otherwise" wording
// predates the bool return type.)
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	// Constant: check directly.
	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		// Masking with a constant m bounds the result to [0, m]
		// (for OANDNOT, to [0, ^m], masked to the type's width).
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		// Unsigned x % c is in [0, c-1], so c <= max suffices.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		// Unsigned x / c shrinks the value; reduce the effective bit
		// width accordingly and fall through to the width check below.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		// Unsigned x >> c has at most bits-c significant bits;
		// shifting by more than the width leaves zero.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	// An unsigned value with this many significant bits is < 1<<bits.
	// (bits <= 62 keeps 1<<bits representable in int64.)
	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}
  1088  
// usemethod checks calls for uses of Method and MethodByName of reflect.Value,
// reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
// When such a call is found, it marks the current function's symbol so the
// linker keeps the reachable methods (or, for a constant MethodByName
// argument, just the named method) alive.
func usemethod(n *ir.CallExpr) {
	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
	// Those functions may be alive via the itab, which should not cause all methods
	// alive. We only want to mark their callers.
	if base.Ctxt.Pkgpath == "reflect" {
		// TODO: is there a better way than hardcoding the names?
		switch fn := ir.CurFunc.Nname.Sym().Name; {
		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
			return
		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
			return
		case fn == "Value.Method", fn == "Value.MethodByName":
			return
		}
	}

	dot, ok := n.Fun.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// looking for either direct method calls and interface method calls of:
	//	reflect.Type.Method        - func(int) reflect.Method
	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
	//
	//	reflect.Value.Method       - func(int) reflect.Value
	//	reflect.Value.MethodByName - func(string) reflect.Value
	methodName := dot.Sel.Name
	t := dot.Selection.Type

	// Check the number of arguments and return values.
	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
		return
	}

	// Check the type of the argument.
	switch pKind := t.Param(0).Type.Kind(); {
	case methodName == "Method" && pKind == types.TINT,
		methodName == "MethodByName" && pKind == types.TSTRING:

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// Check that first result type is "reflect.Method" or "reflect.Value".
	// Note that we have to check sym name and sym package separately, as
	// we can't check for exact string "reflect.Method" reliably
	// (e.g., see #19028 and #38515).
	switch s := t.Result(0).Type.Sym(); {
	case s != nil && types.ReflectSymName(s) == "Method",
		s != nil && types.ReflectSymName(s) == "Value":

	default:
		// not a call to Method or MethodByName of reflect.{Type,Value}.
		return
	}

	// For MethodByName, find the name argument. Its position depends on
	// whether the call goes through an interface (receiver is implicit)
	// or a method expression (receiver is Args[0]).
	var targetName ir.Node
	switch dot.Op() {
	case ir.ODOTINTER:
		if methodName == "MethodByName" {
			targetName = n.Args[0]
		}
	case ir.OMETHEXPR:
		if methodName == "MethodByName" {
			targetName = n.Args[1]
		}
	default:
		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
	}

	// targetName is nil for Method calls; ir.IsConst presumably
	// rejects nil here — the else branch handles that case. TODO confirm.
	if ir.IsConst(targetName, constant.String) {
		// MethodByName with a constant name: record a relocation that
		// keeps only that named method alive.
		name := constant.StringVal(targetName.Val())
		ir.CurFunc.LSym.AddRel(base.Ctxt, obj.Reloc{
			Type: objabi.R_USENAMEDMETHOD,
			Sym:  staticdata.StringSymNoCommon(name),
		})
	} else {
		// Dynamic name (or Method by index): conservatively mark the
		// caller as using reflect method lookup.
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}
  1173  
  1174  func usefield(n *ir.SelectorExpr) {
  1175  	if !buildcfg.Experiment.FieldTrack {
  1176  		return
  1177  	}
  1178  
  1179  	switch n.Op() {
  1180  	default:
  1181  		base.Fatalf("usefield %v", n.Op())
  1182  
  1183  	case ir.ODOT, ir.ODOTPTR:
  1184  		break
  1185  	}
  1186  
  1187  	field := n.Selection
  1188  	if field == nil {
  1189  		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
  1190  	}
  1191  	if field.Sym != n.Sel {
  1192  		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
  1193  	}
  1194  	if !strings.Contains(field.Note, "go:\"track\"") {
  1195  		return
  1196  	}
  1197  
  1198  	outer := n.X.Type()
  1199  	if outer.IsPtr() {
  1200  		outer = outer.Elem()
  1201  	}
  1202  	if outer.Sym() == nil {
  1203  		base.Errorf("tracked field must be in named struct type")
  1204  	}
  1205  
  1206  	sym := reflectdata.TrackSym(outer, field)
  1207  	if ir.CurFunc.FieldTrack == nil {
  1208  		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
  1209  	}
  1210  	ir.CurFunc.FieldTrack[sym] = struct{}{}
  1211  }
  1212  

View as plain text