// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package walk
6
7 import (
8 "fmt"
9 "internal/abi"
10 "internal/buildcfg"
11
12 "cmd/compile/internal/base"
13 "cmd/compile/internal/ir"
14 "cmd/compile/internal/reflectdata"
15 "cmd/compile/internal/rttype"
16 "cmd/compile/internal/ssagen"
17 "cmd/compile/internal/typecheck"
18 "cmd/compile/internal/types"
19 "cmd/internal/src"
20 )
21
22
// tmpstringbufsize is the size in bytes of the stack scratch buffer
// used for short temporary string conversions; its uses are outside
// this chunk — presumably byte-slice-to-string helpers. TODO confirm.
const tmpstringbufsize = 32
24
25 func Walk(fn *ir.Func) {
26 ir.CurFunc = fn
27 errorsBefore := base.Errors()
28 order(fn)
29 if base.Errors() > errorsBefore {
30 return
31 }
32
33 if base.Flag.W != 0 {
34 s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
35 ir.DumpList(s, ir.CurFunc.Body)
36 }
37
38 walkStmtList(ir.CurFunc.Body)
39 if base.Flag.W != 0 {
40 s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
41 ir.DumpList(s, ir.CurFunc.Body)
42 }
43
44
45 for _, n := range fn.Dcl {
46 types.CalcSize(n.Type())
47 }
48 }
49
50
51 func walkRecv(n *ir.UnaryExpr) ir.Node {
52 if n.Typecheck() == 0 {
53 base.Fatalf("missing typecheck: %+v", n)
54 }
55 init := ir.TakeInit(n)
56
57 n.X = walkExpr(n.X, &init)
58 call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
59 return ir.InitExpr(init, call)
60 }
61
62 func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
63 if n.Op() != ir.OAS {
64 base.Fatalf("convas: not OAS %v", n.Op())
65 }
66 n.SetTypecheck(1)
67
68 if n.X == nil || n.Y == nil {
69 return n
70 }
71
72 lt := n.X.Type()
73 rt := n.Y.Type()
74 if lt == nil || rt == nil {
75 return n
76 }
77
78 if ir.IsBlank(n.X) {
79 n.Y = typecheck.DefaultLit(n.Y, nil)
80 return n
81 }
82
83 if !types.Identical(lt, rt) {
84 n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
85 n.Y = walkExpr(n.Y, init)
86 }
87 types.CalcSize(n.Y.Type())
88
89 return n
90 }
91
92 func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
93 if init == nil {
94 base.Fatalf("mkcall with nil init: %v", fn)
95 }
96 if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
97 base.Fatalf("mkcall %v %v", fn, fn.Type())
98 }
99
100 n := fn.Type().NumParams()
101 if n != len(va) {
102 base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
103 }
104
105 call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
106 call.SetType(t)
107 return walkExpr(call, init).(*ir.CallExpr)
108 }
109
110 func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
111 return vmkcall(typecheck.LookupRuntime(name), t, init, args)
112 }
113
114 func mkcallstmt(name string, args ...ir.Node) ir.Node {
115 return mkcallstmt1(typecheck.LookupRuntime(name), args...)
116 }
117
// mkcall1 is like mkcall but takes an already-resolved function node
// rather than a runtime function name.
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}
121
122 func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
123 var init ir.Nodes
124 n := vmkcall(fn, nil, &init, args)
125 if len(init) == 0 {
126 return n
127 }
128 init.Append(n)
129 return ir.NewBlockStmt(n.Pos(), init)
130 }
131
132 func chanfn(name string, n int, t *types.Type) ir.Node {
133 if !t.IsChan() {
134 base.Fatalf("chanfn %v", t)
135 }
136 switch n {
137 case 1:
138 return typecheck.LookupRuntime(name, t.Elem())
139 case 2:
140 return typecheck.LookupRuntime(name, t.Elem(), t.Elem())
141 }
142 base.Fatalf("chanfn %d", n)
143 return nil
144 }
145
146 func mapfn(name string, t *types.Type, isfat bool) ir.Node {
147 if !t.IsMap() {
148 base.Fatalf("mapfn %v", t)
149 }
150 if mapfast(t) == mapslow || isfat {
151 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key(), t.Elem())
152 }
153 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Elem())
154 }
155
156 func mapfndel(name string, t *types.Type) ir.Node {
157 if !t.IsMap() {
158 base.Fatalf("mapfn %v", t)
159 }
160 if mapfast(t) == mapslow {
161 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key())
162 }
163 return typecheck.LookupRuntime(name, t.Key(), t.Elem())
164 }
165
// Runtime map helpers come in a generic ("slow") form plus variants
// specialized by key type. These constants index the per-variant name
// tables built by mkmapnames.
const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

// mapnames holds one runtime helper name per map-access variant.
type mapnames [nmapfast]string

// mkmapnames builds the variant name table for the runtime helper
// family prefix. ptr is the suffix appended to the pointer-key fast
// variants (either "ptr" or "").
func mkmapnames(prefix string, ptr string) mapnames {
	return mapnames{
		prefix,
		prefix + "_fast32",
		prefix + "_fast32" + ptr,
		prefix + "_fast64",
		prefix + "_fast64" + ptr,
		prefix + "_faststr",
	}
}
181
// Name tables for the runtime map helper families, indexed by the
// map*fast* variant constants.
var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")
186
187 func mapfast(t *types.Type) int {
188 if buildcfg.Experiment.SwissMap {
189 return mapfastSwiss(t)
190 }
191 return mapfastOld(t)
192 }
193
// mapfastSwiss classifies map type t for the swiss-map runtime,
// returning one of the map*fast* variant constants.
func mapfastSwiss(t *types.Type) int {
	// NOTE(review): this compares against abi.OldMapMaxElemBytes even
	// though it is the swiss-map path (mapfastOld below uses the same
	// constant) — confirm this matches the swiss runtime's limit for
	// the fast variants.
	if t.Elem().Size() > abi.OldMapMaxElemBytes {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		// A 4-byte key containing a pointer is impossible when
		// pointers are 8 bytes.
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// 8-byte key with pointers on a 32-bit target: fall
		// through to the slow variant.
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}
221
222 func mapfastOld(t *types.Type) int {
223 if t.Elem().Size() > abi.OldMapMaxElemBytes {
224 return mapslow
225 }
226 switch reflectdata.AlgType(t.Key()) {
227 case types.AMEM32:
228 if !t.Key().HasPointers() {
229 return mapfast32
230 }
231 if types.PtrSize == 4 {
232 return mapfast32ptr
233 }
234 base.Fatalf("small pointer %v", t.Key())
235 case types.AMEM64:
236 if !t.Key().HasPointers() {
237 return mapfast64
238 }
239 if types.PtrSize == 8 {
240 return mapfast64ptr
241 }
242
243
244 case types.ASTRING:
245 return mapfaststr
246 }
247 return mapslow
248 }
249
250 func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
251 walkExprListSafe(n.Args, init)
252
253
254
255
256 ls := n.Args
257 for i1, n1 := range ls {
258 ls[i1] = cheapExpr(n1, init)
259 }
260 }
261
262
263 func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
264 op := stmt.Op()
265 n := typecheck.Stmt(stmt)
266 if op == ir.OAS || op == ir.OAS2 {
267
268
269
270
271 n = walkExpr(n, init)
272 } else {
273 n = walkStmt(n)
274 }
275 init.Append(n)
276 }
277
278
279
// maxOpenDefers is, per its name, the limit on defers handled by the
// open-coded defer scheme; its uses are outside this chunk — TODO
// confirm against the defer lowering code.
const maxOpenDefers = 8
281
282
283
284 func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
285 var init ir.Nodes
286 c := cheapExpr(n, &init)
287 if c != n || len(init) != 0 {
288 base.Fatalf("backingArrayPtrLen not cheap: %v", n)
289 }
290 ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
291 if n.Type().IsString() {
292 ptr.SetType(types.Types[types.TUINT8].PtrTo())
293 } else {
294 ptr.SetType(n.Type().Elem().PtrTo())
295 }
296 ptr.SetTypecheck(1)
297 length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
298 length.SetType(types.Types[types.TINT])
299 length.SetTypecheck(1)
300 return ptr, length
301 }
302
303
304
305
// mayCall reports whether evaluating expression n may require a
// function call — either an explicit call, or an implicit one such as
// a runtime panic (bounds check, nil deref, divide) or a soft-float
// arithmetic helper.
func mayCall(n ir.Node) bool {
	// When instrumenting, extra runtime calls can be inserted almost
	// anywhere, so answer conservatively.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// Walked expressions are expected to have empty init lists;
		// anything remaining would be silently ignored here.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			// Any op not explicitly listed below is unexpected at
			// this point in compilation.
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD,
			ir.OSLICE2ARR, ir.OSLICE2ARRPTR:
			// These can panic at runtime (bounds, nil pointer,
			// failed type assertion, division), which entails a
			// call into the runtime.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// The right operand is conditionally evaluated; only
			// its (conditionally executed) init statements could
			// require a call here — TODO confirm the precise
			// rationale against the short-circuit lowering.
			return len(n.Y.Init()) != 0

		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			// Float/complex arithmetic becomes a call on
			// soft-float targets.
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			// Comparisons look at the operand type, since the
			// result type is bool.
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			// Conversions involve a call if either side is a
			// soft-float type.
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OMIN, ir.OMAX:
			// min/max of strings or floats lower to calls.
			return n.Type().IsString() || n.Type().IsFloat()

		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OMAKEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERSP, ir.OSLICEHEADER, ir.OSTRINGHEADER:
			// These ops never require a call.
		}

		return false
	})
}
375
376
// itabType returns an expression loading the type-descriptor field out
// of the itab pointed to by itab, typed as *uint8.
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// Built lazily on first use; rttype supplies the field's
		// offset within the runtime's ITab layout.
		itabTypeField = runtimeField("Type", rttype.ITab.OffsetOf("Type"), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}

// itabTypeField caches the synthesized field descriptor used by
// itabType.
var itabTypeField *types.Field
386
387
388
389 func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
390 sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
391 sel.Selection = field
392 sel.SetType(field.Type)
393 sel.SetTypecheck(1)
394 sel.SetBounded(true)
395 return sel
396 }
397
398 func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
399 f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
400 f.Offset = offset
401 return f
402 }
403
404
405
406
407 func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
408 if t.IsInterface() {
409 base.Fatalf("ifaceData interface: %v", t)
410 }
411 ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
412 if types.IsDirectIface(t) {
413 ptr.SetType(t)
414 ptr.SetTypecheck(1)
415 return ptr
416 }
417 ptr.SetType(types.NewPtr(t))
418 ptr.SetTypecheck(1)
419 ind := ir.NewStarExpr(pos, ptr)
420 ind.SetType(t)
421 ind.SetTypecheck(1)
422 ind.SetBounded(true)
423 return ind
424 }
425