Source file src/cmd/compile/internal/walk/expr.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/abi"
    11  	"internal/buildcfg"
    12  	"strings"
    13  
    14  	"cmd/compile/internal/base"
    15  	"cmd/compile/internal/ir"
    16  	"cmd/compile/internal/objw"
    17  	"cmd/compile/internal/reflectdata"
    18  	"cmd/compile/internal/rttype"
    19  	"cmd/compile/internal/staticdata"
    20  	"cmd/compile/internal/typecheck"
    21  	"cmd/compile/internal/types"
    22  	"cmd/internal/obj"
    23  	"cmd/internal/objabi"
    24  )
    25  
    26  // The result of walkExpr MUST be assigned back to n, e.g.
    27  //
    28  //	n.Left = walkExpr(n.Left, init)
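        //
        // (walkExpr may replace the node entirely; for example, an OMETHEXPR
        // is rewritten to its underlying function name in walkExpr1, so
        // discarding the result would lose that rewrite.)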
    29  func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
    30  	if n == nil {
    31  		return n
    32  	}
    33  
    34  	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
    35  		// not okay to use n->ninit when walking n,
    36  		// because we might replace n with some other node
    37  		// and would lose the init list.
    38  		base.Fatalf("walkExpr init == &n->ninit")
    39  	}
    40  
    41  	if len(n.Init()) != 0 {
    42  		walkStmtList(n.Init())
    43  		init.Append(ir.TakeInit(n)...)
    44  	}
    45  
    46  	lno := ir.SetPos(n)
    47  
    48  	if base.Flag.LowerW > 1 {
    49  		ir.Dump("before walk expr", n)
    50  	}
    51  
    52  	if n.Typecheck() != 1 {
    53  		base.Fatalf("missed typecheck: %+v", n)
    54  	}
    55  
    56  	if n.Type().IsUntyped() {
    57  		base.Fatalf("expression has untyped type: %+v", n)
    58  	}
    59  
    60  	n = walkExpr1(n, init)
    61  
    62  	// Eagerly compute sizes of all expressions for the back end.
    63  	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
    64  		types.CheckSize(typ)
    65  	}
    66  	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
    67  		types.CheckSize(n.Heapaddr.Type())
    68  	}
    69  	if ir.IsConst(n, constant.String) {
    70  		// Emit the string symbol now to avoid emitting
    71  		// any string symbols concurrently during the backend.
    72  		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
    73  	}
    74  
    75  	if base.Flag.LowerW != 0 && n != nil {
    76  		ir.Dump("after walk expr", n)
    77  	}
    78  
    79  	base.Pos = lno
    80  	return n
    81  }
    82  
    83  func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
    84  	switch n.Op() {
    85  	default:
    86  		ir.Dump("walk", n)
    87  		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
    88  		panic("unreachable")
    89  
    90  	case ir.OGETG, ir.OGETCALLERSP:
    91  		return n
    92  
    93  	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
    94  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
    95  		// Perhaps refactor to use Node.mayBeShared for these instead.
    96  		// If these return early, make sure to still call
    97  		// StringSym for constant strings.
    98  		return n
    99  
   100  	case ir.OMETHEXPR:
   101  		// TODO(mdempsky): Do this right after type checking.
   102  		n := n.(*ir.SelectorExpr)
   103  		return n.FuncName()
   104  
   105  	case ir.OMIN, ir.OMAX:
   106  		n := n.(*ir.CallExpr)
   107  		return walkMinMax(n, init)
   108  
   109  	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
   110  		n := n.(*ir.UnaryExpr)
   111  		n.X = walkExpr(n.X, init)
   112  		return n
   113  
   114  	case ir.ODOTMETH, ir.ODOTINTER:
   115  		n := n.(*ir.SelectorExpr)
   116  		n.X = walkExpr(n.X, init)
   117  		return n
   118  
   119  	case ir.OADDR:
   120  		n := n.(*ir.AddrExpr)
   121  		n.X = walkExpr(n.X, init)
   122  		return n
   123  
   124  	case ir.ODEREF:
   125  		n := n.(*ir.StarExpr)
   126  		n.X = walkExpr(n.X, init)
   127  		return n
   128  
   129  	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
   130  		ir.OUNSAFEADD:
   131  		n := n.(*ir.BinaryExpr)
   132  		n.X = walkExpr(n.X, init)
   133  		n.Y = walkExpr(n.Y, init)
   134  		if n.Op() == ir.OUNSAFEADD && ir.ShouldCheckPtr(ir.CurFunc, 1) {
   135  			// For unsafe.Add(p, n), just walk "unsafe.Pointer(uintptr(p)+uintptr(n))"
   136  			// for the side effects of validating unsafe.Pointer rules.
   137  			x := typecheck.ConvNop(n.X, types.Types[types.TUINTPTR])
   138  			y := typecheck.Conv(n.Y, types.Types[types.TUINTPTR])
   139  			conv := typecheck.ConvNop(ir.NewBinaryExpr(n.Pos(), ir.OADD, x, y), types.Types[types.TUNSAFEPTR])
   140  			walkExpr(conv, init)
   141  		}
   142  		return n
   143  
   144  	case ir.OUNSAFESLICE:
   145  		n := n.(*ir.BinaryExpr)
   146  		return walkUnsafeSlice(n, init)
   147  
   148  	case ir.OUNSAFESTRING:
   149  		n := n.(*ir.BinaryExpr)
   150  		return walkUnsafeString(n, init)
   151  
   152  	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
   153  		n := n.(*ir.UnaryExpr)
   154  		return walkUnsafeData(n, init)
   155  
   156  	case ir.ODOT, ir.ODOTPTR:
   157  		n := n.(*ir.SelectorExpr)
   158  		return walkDot(n, init)
   159  
   160  	case ir.ODOTTYPE, ir.ODOTTYPE2:
   161  		n := n.(*ir.TypeAssertExpr)
   162  		return walkDotType(n, init)
   163  
   164  	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
   165  		n := n.(*ir.DynamicTypeAssertExpr)
   166  		return walkDynamicDotType(n, init)
   167  
   168  	case ir.OLEN, ir.OCAP:
   169  		n := n.(*ir.UnaryExpr)
   170  		return walkLenCap(n, init)
   171  
   172  	case ir.OCOMPLEX:
   173  		n := n.(*ir.BinaryExpr)
   174  		n.X = walkExpr(n.X, init)
   175  		n.Y = walkExpr(n.Y, init)
   176  		return n
   177  
   178  	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
   179  		n := n.(*ir.BinaryExpr)
   180  		return walkCompare(n, init)
   181  
   182  	case ir.OANDAND, ir.OOROR:
   183  		n := n.(*ir.LogicalExpr)
   184  		return walkLogical(n, init)
   185  
   186  	case ir.OPRINT, ir.OPRINTLN:
   187  		return walkPrint(n.(*ir.CallExpr), init)
   188  
   189  	case ir.OPANIC:
   190  		n := n.(*ir.UnaryExpr)
   191  		return mkcall("gopanic", nil, init, n.X)
   192  
   193  	case ir.ORECOVER:
   194  		return walkRecover(n.(*ir.CallExpr), init)
   195  
   196  	case ir.OCFUNC:
   197  		return n
   198  
   199  	case ir.OCALLINTER, ir.OCALLFUNC:
   200  		n := n.(*ir.CallExpr)
   201  		return walkCall(n, init)
   202  
   203  	case ir.OAS, ir.OASOP:
   204  		return walkAssign(init, n)
   205  
   206  	case ir.OAS2:
   207  		n := n.(*ir.AssignListStmt)
   208  		return walkAssignList(init, n)
   209  
   210  	// a,b,... = fn()
   211  	case ir.OAS2FUNC:
   212  		n := n.(*ir.AssignListStmt)
   213  		return walkAssignFunc(init, n)
   214  
   215  	// x, y = <-c
   216  	// order.stmt made sure x is addressable or blank.
   217  	case ir.OAS2RECV:
   218  		n := n.(*ir.AssignListStmt)
   219  		return walkAssignRecv(init, n)
   220  
   221  	// a,b = m[i]
   222  	case ir.OAS2MAPR:
   223  		n := n.(*ir.AssignListStmt)
   224  		return walkAssignMapRead(init, n)
   225  
   226  	case ir.ODELETE:
   227  		n := n.(*ir.CallExpr)
   228  		return walkDelete(init, n)
   229  
   230  	case ir.OAS2DOTTYPE:
   231  		n := n.(*ir.AssignListStmt)
   232  		return walkAssignDotType(n, init)
   233  
   234  	case ir.OCONVIFACE:
   235  		n := n.(*ir.ConvExpr)
   236  		return walkConvInterface(n, init)
   237  
   238  	case ir.OCONV, ir.OCONVNOP:
   239  		n := n.(*ir.ConvExpr)
   240  		return walkConv(n, init)
   241  
   242  	case ir.OSLICE2ARR:
   243  		n := n.(*ir.ConvExpr)
   244  		return walkSliceToArray(n, init)
   245  
   246  	case ir.OSLICE2ARRPTR:
   247  		n := n.(*ir.ConvExpr)
   248  		n.X = walkExpr(n.X, init)
   249  		return n
   250  
   251  	case ir.ODIV, ir.OMOD:
   252  		n := n.(*ir.BinaryExpr)
   253  		return walkDivMod(n, init)
   254  
   255  	case ir.OINDEX:
   256  		n := n.(*ir.IndexExpr)
   257  		return walkIndex(n, init)
   258  
   259  	case ir.OINDEXMAP:
   260  		n := n.(*ir.IndexExpr)
   261  		return walkIndexMap(n, init)
   262  
   263  	case ir.ORECV:
   264  		base.Fatalf("walkExpr ORECV") // should see inside OAS only
   265  		panic("unreachable")
   266  
   267  	case ir.OSLICEHEADER:
   268  		n := n.(*ir.SliceHeaderExpr)
   269  		return walkSliceHeader(n, init)
   270  
   271  	case ir.OSTRINGHEADER:
   272  		n := n.(*ir.StringHeaderExpr)
   273  		return walkStringHeader(n, init)
   274  
   275  	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
   276  		n := n.(*ir.SliceExpr)
   277  		return walkSlice(n, init)
   278  
   279  	case ir.ONEW:
   280  		n := n.(*ir.UnaryExpr)
   281  		return walkNew(n, init)
   282  
   283  	case ir.OADDSTR:
   284  		return walkAddString(n.(*ir.AddStringExpr), init, nil)
   285  
   286  	case ir.OAPPEND:
   287  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
   288  		base.Fatalf("append outside assignment")
   289  		panic("unreachable")
   290  
   291  	case ir.OCOPY:
   292  		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
   293  
   294  	case ir.OCLEAR:
   295  		n := n.(*ir.UnaryExpr)
   296  		return walkClear(n)
   297  
   298  	case ir.OCLOSE:
   299  		n := n.(*ir.UnaryExpr)
   300  		return walkClose(n, init)
   301  
   302  	case ir.OMAKECHAN:
   303  		n := n.(*ir.MakeExpr)
   304  		return walkMakeChan(n, init)
   305  
   306  	case ir.OMAKEMAP:
   307  		n := n.(*ir.MakeExpr)
   308  		return walkMakeMap(n, init)
   309  
   310  	case ir.OMAKESLICE:
   311  		n := n.(*ir.MakeExpr)
   312  		return walkMakeSlice(n, init)
   313  
   314  	case ir.OMAKESLICECOPY:
   315  		n := n.(*ir.MakeExpr)
   316  		return walkMakeSliceCopy(n, init)
   317  
   318  	case ir.ORUNESTR:
   319  		n := n.(*ir.ConvExpr)
   320  		return walkRuneToString(n, init)
   321  
   322  	case ir.OBYTES2STR, ir.ORUNES2STR:
   323  		n := n.(*ir.ConvExpr)
   324  		return walkBytesRunesToString(n, init)
   325  
   326  	case ir.OBYTES2STRTMP:
   327  		n := n.(*ir.ConvExpr)
   328  		return walkBytesToStringTemp(n, init)
   329  
   330  	case ir.OSTR2BYTES:
   331  		n := n.(*ir.ConvExpr)
   332  		return walkStringToBytes(n, init)
   333  
   334  	case ir.OSTR2BYTESTMP:
   335  		n := n.(*ir.ConvExpr)
   336  		return walkStringToBytesTemp(n, init)
   337  
   338  	case ir.OSTR2RUNES:
   339  		n := n.(*ir.ConvExpr)
   340  		return walkStringToRunes(n, init)
   341  
   342  	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
   343  		return walkCompLit(n, init)
   344  
   345  	case ir.OSEND:
   346  		n := n.(*ir.SendStmt)
   347  		return walkSend(n, init)
   348  
   349  	case ir.OCLOSURE:
   350  		return walkClosure(n.(*ir.ClosureExpr), init)
   351  
   352  	case ir.OMETHVALUE:
   353  		return walkMethodValue(n.(*ir.SelectorExpr), init)
   354  
   355  	case ir.OMOVE2HEAP:
   356  		n := n.(*ir.MoveToHeapExpr)
   357  		n.Slice = walkExpr(n.Slice, init)
   358  		return n
   359  	}
   360  
   361  	// No return! Each case must return (or panic),
   362  	// to avoid confusion about what gets returned
   363  	// in the presence of type assertions.
   364  }
   365  
   366  // walk the whole tree of the body of an
   367  // expression or simple statement.
   368  // the types of expressions are calculated.
   369  // compile-time constants are evaluated.
   370  // complex side effects like statements are appended to init.
   371  func walkExprList(s []ir.Node, init *ir.Nodes) {
   372  	for i := range s {
   373  		s[i] = walkExpr(s[i], init)
   374  	}
   375  }
   376  
   377  func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
   378  	for i, n := range s {
   379  		s[i] = cheapExpr(n, init)
   380  		s[i] = walkExpr(s[i], init)
   381  	}
   382  }
   383  
   384  func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
   385  	for i, n := range s {
   386  		s[i] = safeExpr(n, init)
   387  		s[i] = walkExpr(s[i], init)
   388  	}
   389  }
   390  
   391  // return side-effect free and cheap n, appending side effects to init.
   392  // result may not be assignable.
   393  func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
   394  	switch n.Op() {
   395  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   396  		return n
   397  	}
   398  
   399  	return copyExpr(n, n.Type(), init)
   400  }
   401  
   402  // return side effect-free n, appending side effects to init.
   403  // result is assignable if n is.
   404  func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
   405  	if n == nil {
   406  		return nil
   407  	}
   408  
   409  	if len(n.Init()) != 0 {
   410  		walkStmtList(n.Init())
   411  		init.Append(ir.TakeInit(n)...)
   412  	}
   413  
   414  	switch n.Op() {
   415  	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
   416  		return n
   417  
   418  	case ir.OLEN, ir.OCAP:
   419  		n := n.(*ir.UnaryExpr)
   420  		l := safeExpr(n.X, init)
   421  		if l == n.X {
   422  			return n
   423  		}
   424  		a := ir.Copy(n).(*ir.UnaryExpr)
   425  		a.X = l
   426  		return walkExpr(typecheck.Expr(a), init)
   427  
   428  	case ir.ODOT, ir.ODOTPTR:
   429  		n := n.(*ir.SelectorExpr)
   430  		l := safeExpr(n.X, init)
   431  		if l == n.X {
   432  			return n
   433  		}
   434  		a := ir.Copy(n).(*ir.SelectorExpr)
   435  		a.X = l
   436  		return walkExpr(typecheck.Expr(a), init)
   437  
   438  	case ir.ODEREF:
   439  		n := n.(*ir.StarExpr)
   440  		l := safeExpr(n.X, init)
   441  		if l == n.X {
   442  			return n
   443  		}
   444  		a := ir.Copy(n).(*ir.StarExpr)
   445  		a.X = l
   446  		return walkExpr(typecheck.Expr(a), init)
   447  
   448  	case ir.OINDEX, ir.OINDEXMAP:
   449  		n := n.(*ir.IndexExpr)
   450  		l := safeExpr(n.X, init)
   451  		r := safeExpr(n.Index, init)
   452  		if l == n.X && r == n.Index {
   453  			return n
   454  		}
   455  		a := ir.Copy(n).(*ir.IndexExpr)
   456  		a.X = l
   457  		a.Index = r
   458  		return walkExpr(typecheck.Expr(a), init)
   459  
   460  	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
   461  		n := n.(*ir.CompLitExpr)
   462  		if isStaticCompositeLiteral(n) {
   463  			return n
   464  		}
   465  	}
   466  
   467  	// make a copy; must not be used as an lvalue
   468  	if ir.IsAddressable(n) {
   469  		base.Fatalf("missing lvalue case in safeExpr: %v", n)
   470  	}
   471  	return cheapExpr(n, init)
   472  }
   473  
   474  func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
   475  	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
   476  	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
   477  	return l
   478  }
   479  
   480  // walkAddString walks a string concatenation expression x.
   481  // If conv is non-nil, x is the conv.X field.
   482  func walkAddString(x *ir.AddStringExpr, init *ir.Nodes, conv *ir.ConvExpr) ir.Node {
   483  	c := len(x.List)
   484  	if c < 2 {
   485  		base.Fatalf("walkAddString count %d too small", c)
   486  	}
   487  
   488  	typ := x.Type()
   489  	if conv != nil {
   490  		typ = conv.Type()
   491  	}
   492  
   493  	// list of string arguments
   494  	var args []ir.Node
   495  
   496  	var fn, fnsmall, fnbig string
   497  
   498  	buf := typecheck.NodNil()
   499  	switch {
   500  	default:
   501  		base.FatalfAt(x.Pos(), "unexpected type: %v", typ)
   502  	case typ.IsString():
   503  		if x.Esc() == ir.EscNone {
   504  			sz := int64(0)
   505  			for _, n1 := range x.List {
   506  				if n1.Op() == ir.OLITERAL {
   507  					sz += int64(len(ir.StringVal(n1)))
   508  				}
   509  			}
   510  
   511  			// Don't allocate the buffer if the result won't fit.
   512  			if sz < tmpstringbufsize {
   513  				// Create temporary buffer for result string on stack.
   514  				buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
   515  			}
   516  		}
   517  
   518  		args = []ir.Node{buf}
   519  		fnsmall, fnbig = "concatstring%d", "concatstrings"
   520  	case typ.IsSlice() && typ.Elem().IsKind(types.TUINT8): // Optimize []byte(str1+str2+...)
   521  		if conv != nil && conv.Esc() == ir.EscNone {
   522  			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
   523  		}
   524  		args = []ir.Node{buf}
   525  		fnsmall, fnbig = "concatbyte%d", "concatbytes"
   526  	}
   527  
   528  	if c <= 5 {
   529  		// small numbers of strings use direct runtime helpers.
   530  		// note: order.expr knows this cutoff too.
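        		// e.g., a+b+c (three operands) lowers to roughly
        		// concatstring3(buf, a, b, c), and []byte(a+b+c) to
        		// concatbyte3(buf, a, b, c).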
   531  		fn = fmt.Sprintf(fnsmall, c)
   532  
   533  		for _, n2 := range x.List {
   534  			args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
   535  		}
   536  	} else {
   537  		// large numbers of strings are passed to the runtime as a slice.
   538  		fn = fnbig
   539  		t := types.NewSlice(types.Types[types.TSTRING])
   540  
   541  		slargs := make([]ir.Node, len(x.List))
   542  		for i, n2 := range x.List {
   543  			slargs[i] = typecheck.Conv(n2, types.Types[types.TSTRING])
   544  		}
   545  		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, slargs)
   546  		slice.Prealloc = x.Prealloc
   547  		args = append(args, slice)
   548  		slice.SetEsc(ir.EscNone)
   549  	}
   550  
   551  	cat := typecheck.LookupRuntime(fn)
   552  	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
   553  	r.Args = args
   554  	r1 := typecheck.Expr(r)
   555  	r1 = walkExpr(r1, init)
   556  	r1.SetType(typ)
   557  
   558  	return r1
   559  }
   560  
   561  type hookInfo struct {
   562  	paramType   types.Kind
   563  	argsNum     int
   564  	runtimeFunc string
   565  }
   566  
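        // hooks lists calls that receive extra libfuzzer instrumentation when
        // base.Debug.Libfuzzer is set: for strings.EqualFold(a, b), walkCall1
        // appends roughly libfuzzerHookEqualFold(a, b, fakePC) to init
        // alongside the original call.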
   567  var hooks = map[string]hookInfo{
   568  	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
   569  }
   570  
   571  // walkCall walks an OCALLFUNC or OCALLINTER node.
   572  func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
   573  	if n.Op() == ir.OCALLMETH {
   574  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   575  	}
   576  	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
   577  		// We expect both interface call reflect.Type.Method and concrete
   578  		// call reflect.(*rtype).Method.
   579  		usemethod(n)
   580  	}
   581  	if n.Op() == ir.OCALLINTER {
   582  		reflectdata.MarkUsedIfaceMethod(n)
   583  	}
   584  
   585  	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
   586  		directClosureCall(n)
   587  	}
   588  
   589  	if ir.IsFuncPCIntrinsic(n) {
   590  		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
   591  		// it to the address of the function, for the ABI under which fn is defined.
   592  		name := n.Fun.(*ir.Name).Sym().Name
   593  		arg := n.Args[0]
   594  		var wantABI obj.ABI
   595  		switch name {
   596  		case "FuncPCABI0":
   597  			wantABI = obj.ABI0
   598  		case "FuncPCABIInternal":
   599  			wantABI = obj.ABIInternal
   600  		}
   601  		if n.Type() != types.Types[types.TUINTPTR] {
   602  			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type()) // as expected by typecheck.FuncPC.
   603  		}
   604  		n := ir.FuncPC(n.Pos(), arg, wantABI)
   605  		return walkExpr(n, init)
   606  	}
   607  
   608  	if n.Op() == ir.OCALLFUNC {
   609  		fn := ir.StaticCalleeName(n.Fun)
   610  		if fn != nil && fn.Sym().Pkg.Path == "internal/abi" && strings.HasPrefix(fn.Sym().Name, "EscapeNonString[") {
   611  			// internal/abi.EscapeNonString[T] is a compiler intrinsic
   612  			// that tells escape analysis to escape its argument based on
   613  			// its type. The call itself is a no-op. Just walk the
   614  			// argument.
   615  			ps := fn.Type().Params()
   616  			if len(ps) == 2 && ps[1].Type.IsShape() {
   617  				return walkExpr(n.Args[1], init)
   618  			}
   619  		}
   620  	}
   621  
   622  	if name, ok := n.Fun.(*ir.Name); ok {
   623  		sym := name.Sym()
   624  		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
   625  			// Call to runtime.deferrangefunc is being shared with a range-over-func
   626  			// body that might add defers to this frame, so we cannot use open-coded defers
   627  			// and we need to call deferreturn even if we don't see any other explicit defers.
   628  			ir.CurFunc.SetHasDefer(true)
   629  			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
   630  		}
   631  	}
   632  
   633  	walkCall1(n, init)
   634  	return n
   635  }
   636  
   637  func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
   638  	if n.Walked() {
   639  		return // already walked
   640  	}
   641  	n.SetWalked(true)
   642  
   643  	if n.Op() == ir.OCALLMETH {
   644  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   645  	}
   646  
   647  	args := n.Args
   648  	params := n.Fun.Type().Params()
   649  
   650  	n.Fun = walkExpr(n.Fun, init)
   651  	walkExprList(args, init)
   652  
   653  	for i, arg := range args {
   654  		// Validate argument and parameter types match.
   655  		param := params[i]
   656  		if !types.Identical(arg.Type(), param.Type) {
   657  			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
   658  		}
   659  
   660  		// For any argument whose evaluation might require a function call,
   661  		// store that argument into a temporary variable,
   662  		// to prevent those calls from clobbering arguments already on the stack.
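        		// For example, in f(x, g()), the result of g() is stored in a
        		// temporary first rather than being evaluated directly into
        		// f's argument list.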
   663  		if mayCall(arg) {
   664  			// assignment of arg to Temp
   665  			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
   666  			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
   667  			// replace arg with temp
   668  			args[i] = tmp
   669  		}
   670  	}
   671  
   672  	funSym := n.Fun.Sym()
   673  	if base.Debug.Libfuzzer != 0 && funSym != nil {
   674  		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
   675  			if len(args) != hook.argsNum {
   676  				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
   677  			}
   678  			var hookArgs []ir.Node
   679  			for _, arg := range args {
   680  				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
   681  			}
   682  			hookArgs = append(hookArgs, fakePC(n))
   683  			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
   684  		}
   685  	}
   686  }
   687  
   688  // walkDivMod walks an ODIV or OMOD node.
   689  func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
   690  	n.X = walkExpr(n.X, init)
   691  	n.Y = walkExpr(n.Y, init)
   692  
   693  	// rewrite complex div into function call.
   694  	et := n.X.Type().Kind()
   695  
   696  	if types.IsComplex[et] && n.Op() == ir.ODIV {
   697  		t := n.Type()
   698  		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
   699  		return typecheck.Conv(call, t)
   700  	}
   701  
   702  	// Nothing to do for float divisions.
   703  	if types.IsFloat[et] {
   704  		return n
   705  	}
   706  
   707  	// rewrite 64-bit div and mod on 32-bit architectures.
   708  	// TODO: Remove this code once we can introduce
   709  	// runtime calls late in SSA processing.
   710  	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
   711  		if n.Y.Op() == ir.OLITERAL {
   712  			// Leave div/mod by non-zero constants.
   713  			// The SSA backend will handle those.
   714  			// (Zero constants should have been rejected already, but we check just in case.)
   715  			switch et {
   716  			case types.TINT64:
   717  				if ir.Int64Val(n.Y) != 0 {
   718  					return n
   719  				}
   720  			case types.TUINT64:
   721  				if ir.Uint64Val(n.Y) != 0 {
   722  					return n
   723  				}
   724  			}
   725  		}
   726  		// Build call to uint64div, uint64mod, int64div, or int64mod.
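        		// e.g., on a 32-bit target, x / y with int64 operands becomes
        		// roughly a call to int64div(x, y).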
   727  		var fn string
   728  		if et == types.TINT64 {
   729  			fn = "int64"
   730  		} else {
   731  			fn = "uint64"
   732  		}
   733  		if n.Op() == ir.ODIV {
   734  			fn += "div"
   735  		} else {
   736  			fn += "mod"
   737  		}
   738  		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
   739  	}
   740  	return n
   741  }
   742  
   743  // walkDot walks an ODOT or ODOTPTR node.
   744  func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
   745  	usefield(n)
   746  	n.X = walkExpr(n.X, init)
   747  	return n
   748  }
   749  
   750  // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
   751  func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
   752  	n.X = walkExpr(n.X, init)
   753  	// Set up interface type addresses for back end.
   754  	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
   755  		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
   756  	}
   757  	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
   758  		// This kind of conversion needs a runtime call. Allocate
   759  		// a descriptor for that call.
   760  		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
   761  	}
   762  	return n
   763  }
   764  
   765  func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
   766  	// Converting from an interface to a non-empty interface needs a runtime call.
   767  	// Allocate an internal/abi.TypeAssert descriptor for that call.
   768  	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
   769  	typeAssertGen++
   770  	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
   771  	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
   772  	c.Field("Inter").WritePtr(reflectdata.TypeLinksym(target))
   773  	c.Field("CanFail").WriteBool(canFail)
   774  	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
   775  	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
   776  	return lsym
   777  }
   778  
   779  var typeAssertGen int
   780  
   781  // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
   782  func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
   783  	n.X = walkExpr(n.X, init)
   784  	n.RType = walkExpr(n.RType, init)
   785  	n.ITab = walkExpr(n.ITab, init)
   786  	// Convert to non-dynamic if we can.
   787  	if n.RType != nil && n.RType.Op() == ir.OADDR {
   788  		addr := n.RType.(*ir.AddrExpr)
   789  		if addr.X.Op() == ir.OLINKSYMOFFSET {
   790  			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
   791  			if n.Op() == ir.ODYNAMICDOTTYPE2 {
   792  				r.SetOp(ir.ODOTTYPE2)
   793  			}
   794  			r.SetType(n.Type())
   795  			r.SetTypecheck(1)
   796  			return walkExpr(r, init)
   797  		}
   798  	}
   799  	return n
   800  }
   801  
   802  // walkIndex walks an OINDEX node.
   803  func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
   804  	n.X = walkExpr(n.X, init)
   805  
   806  	// save the original node for bounds checking elision.
   807  	// If it was an ODIV/OMOD, walk might rewrite it.
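        	// For example, for a[i%8] with unsigned i on an [8]T array, r is
        	// the original i%8 node, which lets bounded prove the index is in
        	// [0, 8) even if walkExpr rewrites the OMOD.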
   808  	r := n.Index
   809  
   810  	n.Index = walkExpr(n.Index, init)
   811  
   812  	// if range of type cannot exceed static array bound,
   813  	// disable bounds check.
   814  	if n.Bounded() {
   815  		return n
   816  	}
   817  	t := n.X.Type()
   818  	if t != nil && t.IsPtr() {
   819  		t = t.Elem()
   820  	}
   821  	if t.IsArray() {
   822  		n.SetBounded(bounded(r, t.NumElem()))
   823  		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
   824  			base.Warn("index bounds check elided")
   825  		}
   826  	} else if ir.IsConst(n.X, constant.String) {
   827  		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
   828  		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
   829  			base.Warn("index bounds check elided")
   830  		}
   831  	}
   832  	return n
   833  }
   834  
   835  // mapKeyArg returns an expression for key that is suitable to be passed
   836  // as the key argument for runtime map* functions.
   837  // n is the map indexing or delete Node (to provide Pos).
   838  func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
   839  	if fast == mapslow {
   840  		// standard version takes key by reference.
   841  		// orderState.expr made sure key is addressable.
   842  		return typecheck.NodAddr(key)
   843  	}
   844  	if assigned {
   845  		// mapassign does distinguish pointer vs. integer key.
   846  		return key
   847  	}
   848  	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
   849  	switch fast {
   850  	case mapfast32ptr:
   851  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
   852  	case mapfast64ptr:
   853  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
   854  	default:
   855  		// fast version takes key by value.
   856  		return key
   857  	}
   858  }
   859  
   860  // walkIndexMap walks an OINDEXMAP node.
   861  // It replaces m[k] with *map{access1,assign}(maptype, m, &k)
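        // (For element types larger than abi.ZeroValSize, mapaccess1_fat is
        // used instead, with an extra pointer to a shared zero value.)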
   862  func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
   863  	n.X = walkExpr(n.X, init)
   864  	n.Index = walkExpr(n.Index, init)
   865  	map_ := n.X
   866  	t := map_.Type()
   867  	fast := mapfast(t)
   868  	key := mapKeyArg(fast, n, n.Index, n.Assigned)
   869  	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}
   870  
   871  	var mapFn ir.Node
   872  	switch {
   873  	case n.Assigned:
   874  		mapFn = mapfn(mapassign[fast], t, false)
   875  	case t.Elem().Size() > abi.ZeroValSize:
   876  		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
   877  		mapFn = mapfn("mapaccess1_fat", t, true)
   878  	default:
   879  		mapFn = mapfn(mapaccess1[fast], t, false)
   880  	}
   881  	call := mkcall1(mapFn, nil, init, args...)
   882  	call.SetType(types.NewPtr(t.Elem()))
   883  	call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
   884  	star := ir.NewStarExpr(base.Pos, call)
   885  	star.SetType(t.Elem())
   886  	star.SetTypecheck(1)
   887  	return star
   888  }
   889  
   890  // walkLogical walks an OANDAND or OOROR node.
   891  func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
   892  	n.X = walkExpr(n.X, init)
   893  
   894  	// cannot put side effects from n.Right on init,
   895  	// because they cannot run before n.Left is checked.
   896  	// save elsewhere and store on the eventual n.Right.
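        	// For example, in ok && m[k] > 0, statements produced while
        	// walking m[k] > 0 must run only after ok evaluates to true.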
   897  	var ll ir.Nodes
   898  
   899  	n.Y = walkExpr(n.Y, &ll)
   900  	n.Y = ir.InitExpr(ll, n.Y)
   901  	return n
   902  }
   903  
   904  // walkSend walks an OSEND node.
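        // c <- v is lowered to roughly chansend1(c, &v), with the value
        // passed by address.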
   905  func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
   906  	n1 := n.Value
   907  	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
   908  	n1 = walkExpr(n1, init)
   909  	n1 = typecheck.NodAddr(n1)
   910  	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
   911  }
   912  
   913  // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
   914  func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
   915  	n.X = walkExpr(n.X, init)
   916  	n.Low = walkExpr(n.Low, init)
   917  	if n.Low != nil && ir.IsZero(n.Low) {
   918  		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
   919  		n.Low = nil
   920  	}
   921  	n.High = walkExpr(n.High, init)
   922  	n.Max = walkExpr(n.Max, init)
   923  
   924  	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
   925  		// Reduce x[:] to x.
   926  		if base.Debug.Slice > 0 {
   927  			base.Warn("slice: omit slice operation")
   928  		}
   929  		return n.X
   930  	}
   931  	return n
   932  }
   933  
   934  // walkSliceHeader walks an OSLICEHEADER node.
   935  func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
   936  	n.Ptr = walkExpr(n.Ptr, init)
   937  	n.Len = walkExpr(n.Len, init)
   938  	n.Cap = walkExpr(n.Cap, init)
   939  	return n
   940  }
   941  
   942  // walkStringHeader walks an OSTRINGHEADER node.
   943  func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
   944  	n.Ptr = walkExpr(n.Ptr, init)
   945  	n.Len = walkExpr(n.Len, init)
   946  	return n
   947  }
   948  
   949  // bounded reports whether integer n must be in range [0, max).
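        // For example, i&7 is always in [0, 8), and for unsigned i so is i%8;
        // walkIndex uses this to elide bounds checks on constant-size arrays
        // and constant strings.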
   950  func bounded(n ir.Node, max int64) bool {
   951  	if n.Type() == nil || !n.Type().IsInteger() {
   952  		return false
   953  	}
   954  
   955  	sign := n.Type().IsSigned()
   956  	bits := int32(8 * n.Type().Size())
   957  
   958  	if ir.IsSmallIntConst(n) {
   959  		v := ir.Int64Val(n)
   960  		return 0 <= v && v < max
   961  	}
   962  
   963  	switch n.Op() {
   964  	case ir.OAND, ir.OANDNOT:
   965  		n := n.(*ir.BinaryExpr)
   966  		v := int64(-1)
   967  		switch {
   968  		case ir.IsSmallIntConst(n.X):
   969  			v = ir.Int64Val(n.X)
   970  		case ir.IsSmallIntConst(n.Y):
   971  			v = ir.Int64Val(n.Y)
   972  			if n.Op() == ir.OANDNOT {
   973  				v = ^v
   974  				if !sign {
   975  					v &= 1<<uint(bits) - 1
   976  				}
   977  			}
   978  		}
   979  		if 0 <= v && v < max {
   980  			return true
   981  		}
   982  
   983  	case ir.OMOD:
   984  		n := n.(*ir.BinaryExpr)
   985  		if !sign && ir.IsSmallIntConst(n.Y) {
   986  			v := ir.Int64Val(n.Y)
   987  			if 0 <= v && v <= max {
   988  				return true
   989  			}
   990  		}
   991  
   992  	case ir.ODIV:
   993  		n := n.(*ir.BinaryExpr)
   994  		if !sign && ir.IsSmallIntConst(n.Y) {
   995  			v := ir.Int64Val(n.Y)
   996  			for bits > 0 && v >= 2 {
   997  				bits--
   998  				v >>= 1
   999  			}
  1000  		}
  1001  
  1002  	case ir.ORSH:
  1003  		n := n.(*ir.BinaryExpr)
  1004  		if !sign && ir.IsSmallIntConst(n.Y) {
  1005  			v := ir.Int64Val(n.Y)
  1006  			if v > int64(bits) {
  1007  				return true
  1008  			}
  1009  			bits -= int32(v)
  1010  		}
  1011  	}
  1012  
  1013  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  1014  		return true
  1015  	}
  1016  
  1017  	return false
  1018  }
  1019  
  1020  // usemethod checks calls for uses of Method and MethodByName of reflect.Value,
  1021  // reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
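        // When the method name is a constant string it records an
        // R_USENAMEDMETHOD relocation for that name; otherwise it marks the
        // calling function with AttrReflectMethod, so the linker's dead-code
        // elimination keeps the reflectively reachable methods alive.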
  1022  func usemethod(n *ir.CallExpr) {
  1023  	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
  1024  	// Those functions may be alive via the itab, which should not cause all
  1025  	// methods to be kept alive. We only want to mark their callers.
  1026  	if base.Ctxt.Pkgpath == "reflect" {
  1027  		// TODO: is there a better way than hardcoding the names?
  1028  		switch fn := ir.CurFunc.Nname.Sym().Name; {
  1029  		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
  1030  			return
  1031  		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
  1032  			return
  1033  		case fn == "Value.Method", fn == "Value.MethodByName":
  1034  			return
  1035  		}
  1036  	}
  1037  
  1038  	dot, ok := n.Fun.(*ir.SelectorExpr)
  1039  	if !ok {
  1040  		return
  1041  	}
  1042  
  1043  	// looking for either direct method calls or interface method calls of:
  1044  	//	reflect.Type.Method        - func(int) reflect.Method
  1045  	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
  1046  	//
  1047  	//	reflect.Value.Method       - func(int) reflect.Value
  1048  	//	reflect.Value.MethodByName - func(string) reflect.Value
  1049  	methodName := dot.Sel.Name
  1050  	t := dot.Selection.Type
  1051  
  1052  	// Check the number of arguments and return values.
  1053  	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
  1054  		return
  1055  	}
  1056  
  1057  	// Check the type of the argument.
  1058  	switch pKind := t.Param(0).Type.Kind(); {
  1059  	case methodName == "Method" && pKind == types.TINT,
  1060  		methodName == "MethodByName" && pKind == types.TSTRING:
  1061  
  1062  	default:
  1063  		// not a call to Method or MethodByName of reflect.{Type,Value}.
  1064  		return
  1065  	}
  1066  
  1067  	// Check that first result type is "reflect.Method" or "reflect.Value".
  1068  	// Note that we have to check sym name and sym package separately, as
  1069  	// we can't check for exact string "reflect.Method" reliably
  1070  	// (e.g., see #19028 and #38515).
  1071  	switch s := t.Result(0).Type.Sym(); {
  1072  	case s != nil && types.ReflectSymName(s) == "Method",
  1073  		s != nil && types.ReflectSymName(s) == "Value":
  1074  
  1075  	default:
  1076  		// not a call to Method or MethodByName of reflect.{Type,Value}.
  1077  		return
  1078  	}
  1079  
  1080  	var targetName ir.Node
  1081  	switch dot.Op() {
  1082  	case ir.ODOTINTER:
  1083  		if methodName == "MethodByName" {
  1084  			targetName = n.Args[0]
  1085  		}
  1086  	case ir.OMETHEXPR:
  1087  		if methodName == "MethodByName" {
  1088  			targetName = n.Args[1]
  1089  		}
  1090  	default:
  1091  		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
  1092  	}
  1093  
  1094  	if ir.IsConst(targetName, constant.String) {
  1095  		name := constant.StringVal(targetName.Val())
  1096  		ir.CurFunc.LSym.AddRel(base.Ctxt, obj.Reloc{
  1097  			Type: objabi.R_USENAMEDMETHOD,
  1098  			Sym:  staticdata.StringSymNoCommon(name),
  1099  		})
  1100  	} else {
  1101  		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
  1102  	}
  1103  }
  1104  
  1105  func usefield(n *ir.SelectorExpr) {
  1106  	if !buildcfg.Experiment.FieldTrack {
  1107  		return
  1108  	}
  1109  
  1110  	switch n.Op() {
  1111  	default:
  1112  		base.Fatalf("usefield %v", n.Op())
  1113  
  1114  	case ir.ODOT, ir.ODOTPTR:
  1115  		break
  1116  	}
  1117  
  1118  	field := n.Selection
  1119  	if field == nil {
  1120  		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
  1121  	}
  1122  	if field.Sym != n.Sel {
  1123  		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
  1124  	}
  1125  	if !strings.Contains(field.Note, "go:\"track\"") {
  1126  		return
  1127  	}
  1128  
  1129  	outer := n.X.Type()
  1130  	if outer.IsPtr() {
  1131  		outer = outer.Elem()
  1132  	}
  1133  	if outer.Sym() == nil {
  1134  		base.Errorf("tracked field must be in named struct type")
  1135  	}
  1136  
  1137  	sym := reflectdata.TrackSym(outer, field)
  1138  	if ir.CurFunc.FieldTrack == nil {
  1139  		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
  1140  	}
  1141  	ir.CurFunc.FieldTrack[sym] = struct{}{}
  1142  }
  1143  
