// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"internal/abi"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

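// The constant is known to runtime: it is the size of the temporary
// string buffers (runtime.tmpBuf) that can be stack allocated.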
const tmpstringbufsize = 32

func Walk(fn *ir.Func) {
	ir.CurFunc = fn
	errorsBefore := base.Errors()
	order(fn)
	if base.Errors() > errorsBefore {
		return
	}

	if base.Flag.W != 0 {
		s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	walkStmtList(ir.CurFunc.Body)
	if base.Flag.W != 0 {
		s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	// Eagerly compute sizes of all variables for SSA.
	for _, n := range fn.Dcl {
		types.CalcSize(n.Type())
	}
}

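// walkRecv walks an ORECV node, lowering the receive to a runtime
// chanrecv1 call; the nil element pointer means the received value
// is discarded.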
func walkRecv(n *ir.UnaryExpr) ir.Node {
	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}
	init := ir.TakeInit(n)

	n.X = walkExpr(n.X, &init)
	call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
	return ir.InitExpr(init, call)
}

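// convas ensures that the right-hand side of assignment n is assignable to
// the type of the left-hand side, inserting and walking an assignment
// conversion when the two types differ.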
func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
	if n.Op() != ir.OAS {
		base.Fatalf("convas: not OAS %v", n.Op())
	}
	n.SetTypecheck(1)

	if n.X == nil || n.Y == nil {
		return n
	}

	lt := n.X.Type()
	rt := n.Y.Type()
	if lt == nil || rt == nil {
		return n
	}

	if ir.IsBlank(n.X) {
		n.Y = typecheck.DefaultLit(n.Y, nil)
		return n
	}

	if !types.Identical(lt, rt) {
		n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
		n.Y = walkExpr(n.Y, init)
	}
	types.CalcSize(n.Y.Type())

	return n
}

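// vmkcall builds, typechecks, and walks a call to fn with result type t and
// argument list va, appending any generated statements to init.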
func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
	if init == nil {
		base.Fatalf("mkcall with nil init: %v", fn)
	}
	if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
		base.Fatalf("mkcall %v %v", fn, fn.Type())
	}

	n := fn.Type().NumParams()
	if n != len(va) {
		base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
	call.SetType(t)
	return walkExpr(call, init).(*ir.CallExpr)
}

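// mkcall builds a call to the runtime function with the given name; see vmkcall.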
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(typecheck.LookupRuntime(name), t, init, args)
}

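// mkcallstmt builds a statement that calls the runtime function with the given name.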
func mkcallstmt(name string, args ...ir.Node) ir.Node {
	return mkcallstmt1(typecheck.LookupRuntime(name), args...)
}

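// mkcall1 is like mkcall, but takes an already resolved function node
// instead of a runtime function name.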
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}

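// mkcallstmt1 is like mkcall1, but for a call used as a statement: any init
// statements produced while walking the call are wrapped together with it in
// a block.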
func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
	var init ir.Nodes
	n := vmkcall(fn, nil, &init, args)
	if len(init) == 0 {
		return n
	}
	init.Append(n)
	return ir.NewBlockStmt(n.Pos(), init)
}

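// chanfn returns the runtime channel function with the given name,
// instantiated for channel type t; n is the number of type arguments
// the runtime function expects.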
func chanfn(name string, n int, t *types.Type) ir.Node {
	if !t.IsChan() {
		base.Fatalf("chanfn %v", t)
	}
	switch n {
	case 1:
		return typecheck.LookupRuntime(name, t.Elem())
	case 2:
		return typecheck.LookupRuntime(name, t.Elem(), t.Elem())
	}
	base.Fatalf("chanfn %d", n)
	return nil
}

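// mapfn returns the runtime map function with the given name, instantiated
// for map type t. The fast variants take fewer type arguments; isfat forces
// the generic (slow) signature.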
func mapfn(name string, t *types.Type, isfat bool) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfn %v", t)
	}
	if mapfast(t) == mapslow || isfat {
		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key(), t.Elem())
	}
	return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Elem())
}

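// mapfndel returns the runtime map delete function with the given name,
// instantiated for map type t.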
func mapfndel(name string, t *types.Type) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfn %v", t)
	}
	if mapfast(t) == mapslow {
		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key())
	}
	return typecheck.LookupRuntime(name, t.Key(), t.Elem())
}

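// Fast-path variants of the runtime map helpers, selected by mapfast.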
const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

type mapnames [nmapfast]string

func mkmapnames(base string, ptr string) mapnames {
	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
}

var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")

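// mapfast reports which fast-path map helper variant, if any, applies to map
// type t, based on the key's hash/equality algorithm and the element size.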
func mapfast(t *types.Type) int {
	if t.Elem().Size() > abi.MapMaxElemBytes {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// Pointer-containing 8-byte key on a 32-bit platform: there is
		// no fast variant for this case, so fall through to mapslow.
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}

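// walkAppendArgs walks the arguments of an append call and rewrites each one
// into a cheap expression that can safely be referenced more than once.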
func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
	walkExprListSafe(n.Args, init)

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are names or literals, but those may index the slice
	// we are modifying here. Fix them explicitly.
	ls := n.Args
	for i1, n1 := range ls {
		ls[i1] = cheapExpr(n1, init)
	}
}

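// appendWalkStmt typechecks and walks stmt and then appends it to init.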
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
	op := stmt.Op()
	n := typecheck.Stmt(stmt)
	if op == ir.OAS || op == ir.OAS2 {
		// For assignments, walk the node as an expression so that any
		// statements generated during walking are appended to init.
		n = walkExpr(n, init)
	} else {
		n = walkStmt(n)
	}
	init.Append(n)
}

// The max number of defers in a function using open-coded defers. When there
// are more than this many defers in a function, the compiler falls back to
// non-open-coded (runtime-managed) defers.
const maxOpenDefers = 8

// backingArrayPtrLen extracts the pointer and length from a slice or string.
// This constructs two nodes referring to n, so n must be a cheap expression.
func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
	var init ir.Nodes
	c := cheapExpr(n, &init)
	if c != n || len(init) != 0 {
		base.Fatalf("backingArrayPtrLen not cheap: %v", n)
	}
	ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
	if n.Type().IsString() {
		ptr.SetType(types.Types[types.TUINT8].PtrTo())
	} else {
		ptr.SetType(n.Type().Elem().PtrTo())
	}
	ptr.SetTypecheck(1)
	length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
	length.SetType(types.Types[types.TINT])
	length.SetTypecheck(1)
	return ptr, length
}

// mayCall reports whether evaluating expression n may require
// function calls, which could clobber function call arguments/results
// currently on the stack.
func mayCall(n ir.Node) bool {
	// When instrumenting, any expression might require function calls.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// walk should have already moved all Init blocks off of
		// expressions.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD,
			ir.OSLICE2ARR, ir.OSLICE2ARRPTR:
			// These ops might panic, so make sure they are done before
			// we start marshaling args for a call.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// The RHS expression may have init statements that should
			// only execute conditionally, so they cannot be hoisted out
			// ahead of the whole expression.
			return len(n.Y.Init()) != 0

		// When using soft-float, these ops might be rewritten to
		// function calls, so we ensure they are evaluated first.
		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OMIN, ir.OMAX:
			// min/max of strings or floats may require a runtime call.
			return n.Type().IsString() || n.Type().IsFloat()

		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OMAKEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OSLICEHEADER, ir.OSTRINGHEADER:
			// ok: operations that don't require function calls.
			// Expand as needed.
		}

		return false
	})
}

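// itabType loads the Type field from an itab (internal/abi.ITab) pointer.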
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// internal/abi.ITab's Type field
		itabTypeField = runtimeField("Type", rttype.ITab.OffsetOf("Type"), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}

var itabTypeField *types.Field

// boundedDotPtr returns a selector expression representing ptr.field,
// marked bounded so that nil-pointer checks for ptr are omitted.
func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
	sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
	sel.Selection = field
	sel.SetType(field.Type)
	sel.SetTypecheck(1)
	sel.SetBounded(true)
	return sel
}

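// runtimeField constructs a types.Field for a field of a runtime struct
// with the given name, offset, and type.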
func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
	f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
	f.Offset = offset
	return f
}

// ifaceData loads the data field from an interface value n.
// The interface's concrete type must be known to be t.
// It follows the data pointer if t is not a direct-interface type.
func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
	if t.IsInterface() {
		base.Fatalf("ifaceData interface: %v", t)
	}
	ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
	if types.IsDirectIface(t) {
		ptr.SetType(t)
		ptr.SetTypecheck(1)
		return ptr
	}
	ptr.SetType(types.NewPtr(t))
	ptr.SetTypecheck(1)
	ind := ir.NewStarExpr(pos, ptr)
	ind.SetType(t)
	ind.SetTypecheck(1)
	ind.SetBounded(true)
	return ind
}