package walk

import (
	"fmt"
	"go/constant"
	"internal/buildcfg"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/staticinit"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/src"
)

// The order pass rewrites the IR tree to use separate statements to enforce
// evaluation order. This makes the later walk pass easier, because it can
// reorder freely within an expression.
//
// It also introduces temporaries as needed by runtime routines: for example,
// many map runtime routines take the key by reference, so map keys are copied
// to addressable temporaries. Map index expressions are arranged to appear
// only in direct assignments such as x = m[k] or m[k] = x, and receive
// expressions only as x = <-c or standalone <-c, never inside larger
// expressions.

// orderState holds state during the ordering process.
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables
	free map[string][]*ir.Name // free list of unused temporaries, keyed by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}

// order rewrites fn.Body to apply the ordering constraints
// described in the comment at the top of the file.
func order(fn *ir.Func) {
	if base.Flag.W > 1 {
		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
		ir.DumpList(s, fn.Body)
	}
	ir.SetPos(fn)
	orderBlock(&fn.Body, map[string][]*ir.Name{})
}
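
// Illustrative sketch (not from the original source): conceptually, ordering
// rewrites a statement whose evaluation order must be pinned, such as
//
//	m[k] = <-c
//
// into something like
//
//	tmp := <-c
//	m[k] = tmp
//
// so that later passes may freely reorder sub-expressions without changing
// the observable order of effects.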

// append typechecks stmt and appends it to out.
func (o *orderState) append(stmt ir.Node) {
	o.out = append(o.out, typecheck.Stmt(stmt))
}

// newTemp allocates a new temporary with the given type,
// pushes it onto the temp stack, and returns it.
// If clear is true, newTemp emits code to zero the temporary.
func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
	var v *ir.Name
	key := t.LinkString()
	if a := o.free[key]; len(a) > 0 {
		v = a[len(a)-1]
		if !types.Identical(t, v.Type()) {
			base.Fatalf("expected %L to have type %v", v, t)
		}
		o.free[key] = a[:len(a)-1]
	} else {
		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
	}
	if clear {
		o.append(ir.NewAssignStmt(base.Pos, v, nil))
	}

	o.temp = append(o.temp, v)
	return v
}

// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
	return o.copyExpr1(n, false)
}

// copyExprClear is like copyExpr but clears the temp before assignment.
// It is provided for use when the evaluation of tmp = n turns into
// a function call that is passed a pointer to the temporary as the output space.
// If the call blocks before tmp has been written,
// the garbage collector will still treat the temporary as live,
// so we must zero it before entering that call.
// Today, this only happens for channel receive operations.
func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
	return o.copyExpr1(n, true)
}

func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
	t := n.Type()
	v := o.newTemp(t, clear)
	o.append(ir.NewAssignStmt(base.Pos, v, n))
	return v
}

// cheapExpr returns a cheap version of n.
// The definition of cheap is that n is a variable or constant.
// If not, cheapExpr allocates a new tmp, emits tmp = n,
// and then returns tmp.
func (o *orderState) cheapExpr(n ir.Node) ir.Node {
	if n == nil {
		return nil
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n
	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)
	}

	return o.copyExpr(n)
}
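
// Illustrative sketch (not from the original source): cheapExpr leaves names,
// literals, and len/cap of cheap operands untouched, but spills anything more
// complex into a temporary. For example, cheapExpr(f(x)) conceptually yields
//
//	tmp := f(x)   // assignment emitted into o.out
//
// and tmp is returned in place of f(x), so later uses are side-effect free.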

// safeExpr returns a safe version of n.
// The definition of safe is that n can appear multiple times
// without violating the semantics of the original program,
// and that assigning to the safe version has the same effect
// as assigning to the original n.
//
// The intended use is to apply to x when rewriting x += y into x = x + y.
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			l = o.safeExpr(n.X)
		} else {
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil // not reached
	}
}
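
// Illustrative sketch (not from the original source): when rewriting
//
//	a[f()] += y
//
// into a[f()] = a[f()] + y, safeExpr pins the index so f is evaluated once:
//
//	tmp := f()
//	a[tmp] = a[tmp] + y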

// addrTemp ensures that n is okay to pass by address to runtime routines.
// If the original argument n is not okay, addrTemp creates a tmp, emits
// tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g.
//
//	n.Left = o.addrTemp(n.Left)
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	// Constants are materialized as read-only static temporaries
	// (skipped for literals in FIPS packages).
	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// Prevent taking the address of an SSA-able local variable;
	// copy it to a temporary instead so the local itself can stay
	// address-free.
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				goto Copy
			}
		}

		return n
	}

Copy:
	return o.copyExpr(n)
}

// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// It should only be used for map runtime calls which have *_fast* versions.
// The first parameter is the position of n's containing node, for use when
// n itself does not have a unique position.
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: the map*_fast* calls, which take the key by value.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named to unnamed, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// convert via the bit pattern to avoid a constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory: write the key into a temporary of
		// the fast key type kt through a pointer of n's type. The
		// temporary must be at least as aligned as nt requires.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}
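
// Illustrative sketch (not from the original source): for a map[int32]T and a
// key k of a distinct 32-bit type, mapKeyTemp conceptually turns m[k] into
// m[uint32(k)] so the runtime's *_fast32 helper can take the key by value;
// for slow (large or non-fast) key types it falls back to an addressable
// temporary via addrTemp instead.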

// mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
// in n to avoid string allocations for keys in map lookups.
// It reports whether a modification was made.
//
// For:
//
//	x = m[string(k)]
//	x = m[T1{... Tn{..., string(k), ...}}]
//
// where k is []byte and T1 to Tn is a nesting of struct and array literals,
// the allocation of backing bytes for the string can be avoided
// by reusing the []byte backing array, because the map lookup does not
// retain the key after it returns.
func mapKeyReplaceStrConv(n ir.Node) bool {
	var replaced bool
	switch n.Op() {
	case ir.OBYTES2STR:
		n := n.(*ir.ConvExpr)
		n.SetOp(ir.OBYTES2STRTMP)
		replaced = true
	case ir.OSTRUCTLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			elem := elem.(*ir.StructKeyExpr)
			if mapKeyReplaceStrConv(elem.Value) {
				replaced = true
			}
		}
	case ir.OARRAYLIT:
		n := n.(*ir.CompLitExpr)
		for _, elem := range n.List {
			if elem.Op() == ir.OKEY {
				elem = elem.(*ir.KeyExpr).Value
			}
			if mapKeyReplaceStrConv(elem) {
				replaced = true
			}
		}
	}
	return replaced
}
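
// Illustrative sketch (not from the original source): with b declared as
// []byte, a lookup written as
//
//	v := m[string(b)]
//
// is marked so that string(b) becomes a temporary, allocation-free conversion
// (OBYTES2STRTMP) valid only for the duration of the lookup; the OINDEXMAP
// case in expr1 then copies the lookup result so nothing observes the
// short-lived string afterwards.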

// ordermarker marks a position on the temporary variable stack.
type ordermarker int

// markTemp returns the top of the temporary variable stack.
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}

// popTemp pops temporaries off the stack until reaching the mark,
// which must have been returned by markTemp, and adds them to the
// free list for reuse.
func (o *orderState) popTemp(mark ordermarker) {
	for _, n := range o.temp[mark:] {
		key := n.Type().LinkString()
		o.free[key] = append(o.free[key], n)
	}
	o.temp = o.temp[:mark]
}

// stmtList orders each of the statements in the list.
func (o *orderState) stmtList(l ir.Nodes) {
	s := l
	for i := range s {
		orderMakeSliceCopy(s[i:])
		o.stmt(s[i])
	}
}

// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The differing equality operators above are intentional:
		// as.X and cp.X must be the same name, while cp.Y must be
		// a different name, so the data copied comes from elsewhere.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// The copy is bounded when it fills exactly the allocated length,
	// i.e. m = make([]T, len(s)); copy(m, s).
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove the copy; it is now folded into the make.
}
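
// Illustrative sketch (not from the original source): source code such as
//
//	m := make([]byte, len(src))
//	copy(m, src)
//
// is fused into a single OMAKESLICECOPY node, letting the backend allocate
// and copy in one step instead of two adjacent statements.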

// edge inserts coverage instrumentation for libfuzzer.
func (o *orderState) edge() {
	if base.Debug.Libfuzzer == 0 {
		return
	}

	// Create a new uint8 counter for libfuzzer's 8-bit coverage counters.
	counter := staticinit.StaticName(types.Types[types.TUINT8])
	counter.SetLibfuzzer8BitCounter(true)
	// In addition to the flag on the name, set the symbol type directly so
	// that the rest of the toolchain places the counter in the libfuzzer
	// 8-bit counter section.
	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER

	// The counter must never return to zero once it has been incremented,
	// so emit:
	//
	//	if counter == 0xff {
	//		counter = 1
	//	} else {
	//		counter += 1
	//	}
	o.append(ir.NewIfStmt(base.Pos,
		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
}

// orderBlock orders the block of statements in n into a new slice,
// and then replaces the old slice in n with the new slice.
// free is a map that can be used to obtain temporary variables by type.
func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
	if len(*n) != 0 {
		// Set a reasonable position for any instrumenting code emitted here.
		// ir.Nodes has no position of its own (the opening { would be a
		// natural one), so use the first statement's position instead.
		ir.SetPos((*n)[0])
	}
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.edge()
	order.stmtList(*n)
	order.popTemp(mark)
	*n = order.out
}

// exprInPlace orders the side effects in n and
// leaves them as the init list of the final n.
// The result of exprInPlace MUST be assigned back to n, e.g.
//
//	n.Left = o.exprInPlace(n.Left)
func (o *orderState) exprInPlace(n ir.Node) ir.Node {
	var order orderState
	order.free = o.free
	n = order.expr(n, nil)
	n = ir.InitExpr(order.out, n)

	// insert new temporaries from order
	// at head of outer list.
	o.temp = append(o.temp, order.temp...)
	return n
}

// orderStmtInPlace orders the side effects of the single statement n
// and replaces it with the resulting statement list.
// The result of orderStmtInPlace MUST be assigned back to n, e.g.
//
//	n.Left = orderStmtInPlace(n.Left)
//
// free is a map that can be used to obtain temporary variables by type.
func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
	var order orderState
	order.free = free
	mark := order.markTemp()
	order.stmt(n)
	order.popTemp(mark)
	return ir.NewBlockStmt(src.NoXPos, order.out)
}

// init moves n's init list to o.out.
func (o *orderState) init(n ir.Node) {
	if ir.MayBeShared(n) {
		// For concurrency safety, don't mutate potentially shared nodes.
		// First, ensure that no work is required here.
		if len(n.Init()) > 0 {
			base.Fatalf("order.init shared node with ninit")
		}
		return
	}
	o.stmtList(ir.TakeInit(n))
}

// call orders the call expression n.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// For internal/abi.FuncPC* intrinsics applied to a defined function,
		// do not introduce temporaries here, so it is easier to rewrite the
		// call to a symbol address reference later in walk.
		return
	}

	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}

// mapAssign appends n to o.out, making the RHS of a map assignment
// safe first where needed.
func (o *orderState) mapAssign(n ir.Node) {
	switch n.Op() {
	default:
		base.Fatalf("order.mapAssign %v", n.Op())

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		if n.X.Op() == ir.OINDEXMAP {
			n.Y = o.safeMapRHS(n.Y)
		}
		o.out = append(o.out, n)
	}
}

func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
	// Make sure we evaluate the RHS before starting the map insert.
	// We need to make sure the RHS can no longer panic once the insert begins.
	if r.Op() == ir.OAPPEND {
		r := r.(*ir.CallExpr)
		s := r.Args[1:]
		for i, n := range s {
			s[i] = o.cheapExpr(n)
		}
		return r
	}
	return o.cheapExpr(r)
}
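
// Illustrative sketch (not from the original source): for
//
//	m[k] = append(m[k], v...)
//
// safeMapRHS makes the appended arguments cheap (names or temporaries) so
// they are fully evaluated before the map insert begins, while the
// append(m[k], ...) itself is recognized by the OAS case in stmt and shares
// the already-ordered m and k from the left-hand side.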

// stmt orders the statement n, appending to o.out.
func (o *orderState) stmt(n ir.Node) {
	if n == nil {
		return
	}

	lno := ir.SetPos(n)
	o.init(n)

	switch n.Op() {
	default:
		base.Fatalf("order.stmt %v", n.Op())

	case ir.OINLMARK:
		o.out = append(o.out, n)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		t := o.markTemp()

		// Special case: m[k] = append(m[k], ...) should be compiled with a
		// single lookup of m[k]. For that, the two m[k] expressions must
		// remain ir.SameSafeExpr after ordering, so the LHS index is made
		// cheap and its map and key are reused verbatim inside the append
		// on the RHS.
		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])

		n.X = o.expr(n.X, nil)
		if mapAppend {
			indexLHS := n.X.(*ir.IndexExpr)
			indexLHS.X = o.cheapExpr(indexLHS.X)
			indexLHS.Index = o.cheapExpr(indexLHS.Index)

			call := n.Y.(*ir.CallExpr)
			arg0 := call.Args[0]
			// Strip any OCONVNOPs to reach the index expression,
			// mirroring what ir.SameSafeExpr accepts.
			for arg0.Op() == ir.OCONVNOP {
				arg0 = arg0.(*ir.ConvExpr).X
			}
			indexRHS := arg0.(*ir.IndexExpr)
			indexRHS.X = indexLHS.X
			indexRHS.Index = indexLHS.Index

			o.exprList(call.Args[1:])
		} else {
			n.Y = o.expr(n.Y, n.X)
		}
		o.mapAssign(n)
		o.popTemp(t)

	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
			// Rewrite m[k] op= r into m[k] = m[k] op r so
			// that we can ensure that if op panics
			// because r is zero, the panic happens before
			// the map assignment.
			// DeepCopy is a big hammer here, but safeExpr
			// makes sure there is nothing too deep being copied.
			l1 := o.safeExpr(n.X)
			l2 := ir.DeepCopy(src.NoXPos, l1)
			if l2.Op() == ir.OINDEXMAP {
				l2 := l2.(*ir.IndexExpr)
				l2.Assigned = false
			}
			l2 = o.copyExpr(l2)
			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
			o.mapAssign(as)
			o.popTemp(t)
			return
		}

		o.mapAssign(n)
		o.popTemp(t)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		o.exprList(n.Rhs)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special case: x, y = f(). Keep the multi-value call on the RHS;
	// as2func introduces temporaries for the results and assigns them
	// to the real destinations afterwards.
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)
		call := n.Rhs[0]
		o.init(call)
		if ic, ok := call.(*ir.InlinedCallExpr); ok {
			o.stmtList(ic.Body)

			n.SetOp(ir.OAS2)
			n.Rhs = ic.ReturnVars

			o.exprList(n.Rhs)
			o.out = append(o.out, n)
		} else {
			o.call(call)
			o.as2func(n)
		}
		o.popTemp(t)

	// Special cases:
	//
	//	x, ok = v.(T)
	//	x, ok = <-c
	//	x, ok = m[k]
	//
	// The RHS is ordered here; as2ok then introduces result temporaries.
	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		t := o.markTemp()
		o.exprList(n.Lhs)

		switch r := n.Rhs[0]; r.Op() {
		case ir.ODOTTYPE2:
			r := r.(*ir.TypeAssertExpr)
			r.X = o.expr(r.X, nil)
		case ir.ODYNAMICDOTTYPE2:
			r := r.(*ir.DynamicTypeAssertExpr)
			r.X = o.expr(r.X, nil)
			r.RType = o.expr(r.RType, nil)
			r.ITab = o.expr(r.ITab, nil)
		case ir.ORECV:
			r := r.(*ir.UnaryExpr)
			r.X = o.expr(r.X, nil)
		case ir.OINDEXMAP:
			r := r.(*ir.IndexExpr)
			r.X = o.expr(r.X, nil)
			r.Index = o.expr(r.Index, nil)
			// See the similar conversion in the OINDEXMAP case of expr1.
			_ = mapKeyReplaceStrConv(r.Index)
			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
		default:
			base.Fatalf("order.stmt: %v", r.Op())
		}

		o.as2ok(n)
		o.popTemp(t)

	// Special case: does not save n onto out.
	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		o.stmtList(n.List)

	// Special case: these statements carry no sub-expressions;
	// append them to out as is.
	case ir.OBREAK,
		ir.OCONTINUE,
		ir.ODCL,
		ir.OFALL,
		ir.OGOTO,
		ir.OLABEL,
		ir.OTAILCALL:
		o.out = append(o.out, n)

	// Special case: order the call, then append the statement itself.
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)

		// discard results; double-check for no side effects
		for _, result := range n.ReturnVars {
			if staticinit.AnySideEffects(result) {
				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
			}
		}

	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
		n := n.(*ir.UnaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OCOPY:
		n := n.(*ir.BinaryExpr)
		t := o.markTemp()
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVERFP:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		o.call(n)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special case: order the wrapped call of defer and go statements.
	case ir.ODEFER, ir.OGO:
		n := n.(*ir.GoDeferStmt)
		t := o.markTemp()
		o.init(n.Call)
		o.call(n.Call)
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		t := o.markTemp()
		n.Args[0] = o.expr(n.Args[0], nil)
		n.Args[1] = o.expr(n.Args[1], nil)
		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special case: evaluate the loop condition and post statement in
	// place, so their temporaries are scoped to each iteration; the body
	// is ordered as its own block.
	case ir.OFOR:
		n := n.(*ir.ForStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		orderBlock(&n.Body, o.free)
		n.Post = orderStmtInPlace(n.Post, o.free)
		o.out = append(o.out, n)
		o.popTemp(t)

	// Special case: the condition's temporaries are released before the
	// branches, which are ordered as separate blocks.
	case ir.OIF:
		n := n.(*ir.IfStmt)
		t := o.markTemp()
		n.Cond = o.exprInPlace(n.Cond)
		o.popTemp(t)
		orderBlock(&n.Body, o.free)
		orderBlock(&n.Else, o.free)
		o.out = append(o.out, n)

	case ir.ORANGE:
		// n.X is the expression being ranged over.
		// Order it, and then make a copy if we need one.
		// We almost always do, to ensure that we don't
		// see any value changes made during the loop.
		// Usually the copy is cheap (e.g., array pointer,
		// chan pointer, string header, slice header).
		// The exception is ranging over an array value
		// (not a slice, not a pointer to array),
		// which must make a copy to avoid seeing updates made during
		// the range body. Ranging over an array value is uncommon though.

		// Mark []byte(str) range expression to reuse string backing storage.
		// It is safe because the storage cannot be mutated.
		n := n.(*ir.RangeStmt)
		if x, ok := n.X.(*ir.ConvExpr); ok {
			switch x.Op() {
			case ir.OSTR2BYTES:
				x.SetOp(ir.OSTR2BYTESTMP)
				fallthrough
			case ir.OSTR2BYTESTMP:
				x.MarkNonNil()
			}
		}

		t := o.markTemp()
		n.X = o.expr(n.X, nil)

		orderBody := true
		xt := typecheck.RangeExprType(n.X.Type())
		switch k := xt.Kind(); {
		default:
			base.Fatalf("order.stmt range %v", n.Type())

		case types.IsInt[k]:
			// Range over an integer; nothing to copy.

		case k == types.TARRAY, k == types.TSLICE:
			if n.Value == nil || ir.IsBlank(n.Value) {
				// for i := range x will only use x once, to compute len(x).
				// No need to copy it.
				break
			}
			fallthrough

		case k == types.TCHAN, k == types.TSTRING:
			// chan, string, slice, array ranges use value multiple times.
			// make copy.
			r := n.X

			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
				r.SetType(types.Types[types.TSTRING])
				r = typecheck.Expr(r)
			}

			n.X = o.copyExpr(r)

		case k == types.TMAP:
			if isMapClear(n) {
				// Preserve the range-over-map clear pattern so that walk
				// can recognize it and turn it into a single runtime call;
				// do not order the body.
				orderBody = false
				break
			}

			// copy the map value in case it is a map literal.
			// For maps the copy is just one word, so it hardly matters.
			r := n.X
			n.X = o.copyExpr(r)

			// n.Prealloc is the temp for the iterator.
			// The map iterator type contains pointers and needs to be zeroed.
			if buildcfg.Experiment.SwissMap {
				n.Prealloc = o.newTemp(reflectdata.SwissMapIterType(), true)
			} else {
				n.Prealloc = o.newTemp(reflectdata.OldMapIterType(), true)
			}
		}
		n.Key = o.exprInPlace(n.Key)
		n.Value = o.exprInPlace(n.Value)
		if orderBody {
			orderBlock(&n.Body, o.free)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		o.exprList(n.Results)
		o.out = append(o.out, n)

	// Special case: select. Evaluation order for select is stricter than
	// for ordinary expressions: the channel and value operands of every
	// communication clause are evaluated (into temporaries where needed)
	// before the select runs, and any initialization generated for a case
	// is moved into that case's body.
	case ir.OSELECT:
		n := n.(*ir.SelectStmt)
		t := o.markTemp()
		for _, ncas := range n.Cases {
			r := ncas.Comm
			ir.SetPos(ncas)

			// Append any new body prologue to ninit.
			// The loop below moves ninit into the case body.
			if len(ncas.Init()) != 0 {
				base.Fatalf("order select ninit")
			}
			if r == nil {
				continue
			}
			switch r.Op() {
			default:
				ir.Dump("select case", r)
				base.Fatalf("unknown op in select %v", r.Op())

			case ir.OSELRECV2:
				// case x, ok = <-c
				r := r.(*ir.AssignListStmt)
				recv := r.Rhs[0].(*ir.UnaryExpr)
				recv.X = o.expr(recv.X, nil)
				if !ir.IsAutoTmp(recv.X) {
					recv.X = o.copyExpr(recv.X)
				}
				init := ir.TakeInit(r)

				colas := r.Def
				do := func(i int, t *types.Type) {
					n := r.Lhs[i]
					if ir.IsBlank(n) {
						return
					}
					// If this is case x := <-ch or case x, y := <-ch, the case has
					// the ODCL nodes to declare x and y. We want to delay that
					// declaration (and possible allocation) until inside the case body.
					// Delete the ODCL nodes here and recreate them inside the body below.
					if colas {
						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
							init = init[1:]

							// The declaration may be followed by a default
							// initialization assignment for the same variable;
							// drop that too.
							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
								init = init[1:]
							}
						}
						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
						ncas.PtrInit().Append(dcl)
					}
					tmp := o.newTemp(t, t.HasPointers())
					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
					ncas.PtrInit().Append(as)
					r.Lhs[i] = tmp
				}
				do(0, recv.X.Type().Elem())
				do(1, types.Types[types.TBOOL])
				if len(init) != 0 {
					ir.DumpList("ninit", init)
					base.Fatalf("ninit on select recv")
				}
				orderBlock(ncas.PtrInit(), o.free)

			case ir.OSEND:
				r := r.(*ir.SendStmt)
				if len(r.Init()) != 0 {
					ir.DumpList("ninit", r.Init())
					base.Fatalf("ninit on select send")
				}

				// case c <- x
				// The channel and the value are always evaluated.
				r.Chan = o.expr(r.Chan, nil)

				if !ir.IsAutoTmp(r.Chan) {
					r.Chan = o.copyExpr(r.Chan)
				}
				r.Value = o.expr(r.Value, nil)
				if !ir.IsAutoTmp(r.Value) {
					r.Value = o.copyExpr(r.Value)
				}
			}
		}

		// Now that we have accumulated all the temporaries, order each case
		// body and move any queued case initialization into the body.
		for _, cas := range n.Cases {
			orderBlock(&cas.Body, o.free)

			// Prepend the case's init statements to its body so walk sees
			// them in the right place.
			cas.Body.Prepend(ir.TakeInit(cas)...)
		}

		o.out = append(o.out, n)
		o.popTemp(t)

	// Special case: the value being sent is passed to the runtime by
	// address, so make it addressable.
	case ir.OSEND:
		n := n.(*ir.SendStmt)
		t := o.markTemp()
		n.Chan = o.expr(n.Chan, nil)
		n.Value = o.expr(n.Value, nil)
		if base.Flag.Cfg.Instrumenting {
			// Under instrumentation, copy to an ordinary temporary instead
			// of using addrTemp, so the read of the value is still seen.
			n.Value = o.copyExpr(n.Value)
		} else {
			n.Value = o.addrTemp(n.Value)
		}
		o.out = append(o.out, n)
		o.popTemp(t)

	// TODO: Clean temporaries more aggressively.
	// Note that because walkSwitch will rewrite some of the
	// switch into a binary search, this is not as easy as it looks.
	// (If we ran that code here we could invoke order.stmt on
	// the if-else chain instead.)
	// For now just clean all the temporaries at the end.
	// In practice that's fine.
	case ir.OSWITCH:
		n := n.(*ir.SwitchStmt)
		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
			// Add empty "default:" case for instrumentation.
			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
		}

		t := o.markTemp()
		n.Tag = o.expr(n.Tag, nil)
		for _, ncas := range n.Cases {
			o.exprListInPlace(ncas.List)
			orderBlock(&ncas.Body, o.free)
		}

		o.out = append(o.out, n)
		o.popTemp(t)
	}

	base.Pos = lno
}
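
// Illustrative sketch (not from the original source): under instrumentation,
// or for division/modulo on a map element, the OASOP case in stmt rewrites
//
//	m[k] /= d
//
// into roughly
//
//	tmp := m[k]
//	m[k] = tmp / d
//
// so that a divide-by-zero panic happens before the map write begins.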

func hasDefaultCase(n *ir.SwitchStmt) bool {
	for _, ncas := range n.Cases {
		if len(ncas.List) == 0 {
			return true
		}
	}
	return false
}

// exprList orders the expression list l into o.
func (o *orderState) exprList(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.expr(s[i], nil)
	}
}

// exprListInPlace orders the expression list l but saves
// the side effects on the individual expression ninit lists.
func (o *orderState) exprListInPlace(l ir.Nodes) {
	s := l
	for i := range s {
		s[i] = o.exprInPlace(s[i])
	}
}

func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}

// expr orders a single expression, appending side
// effects to o.out as needed.
// If this is part of an assignment lhs = n, lhs is given.
// Otherwise lhs == nil. (When lhs != nil it may be possible
// to avoid copying the result of the expression to a temporary.)
// The result of expr MUST be assigned back to n, e.g.
//
//	n.Left = o.expr(n.Left, lhs)
func (o *orderState) expr(n, lhs ir.Node) ir.Node {
	if n == nil {
		return n
	}
	lno := ir.SetPos(n)
	n = o.expr1(n, lhs)
	base.Pos = lno
	return n
}

func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Fewer than 5 strings use direct runtime helpers.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments so the conversion can reuse the
		// byte slice's backing memory (OBYTES2STRTMP). This is only done
		// when the concatenation also involves a non-empty string literal:
		// in that case the result is always a fresh allocation, so the
		// temporary string cannot leak out through the result.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// cannot be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See the copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// The key may need to be converted or made addressable.
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// Static composite literals are also processed here, producing a named
			// static global whose address can go directly in the interface
			// (see the OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate statement,
			// the conversion and the function call must stay together so the
			// result is never live as a plain uintptr across statements.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r
		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVERFP,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...).
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil)
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil)
		} else {
			o.exprList(n.Args)
		}

		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// memory. This is safe because the comparison reads the strings
			// but does not retain them.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// For struct and array comparisons, both operands must be
			// addressable so they can be passed to the runtime helpers.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//
		//	map[int]int{
		//		a(): b(),
		//		c(): d(),
		//		e(): f(),
		//	}
		//
		// to
		//
		//	m := map[int]int{}
		//	m[a()] = b()
		//	m[c()] = d()
		//	m[e()] = f()
		//
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them
			// into dynamic entries (e.g., OCONVIFACE nodes), so re-check.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so the count can be
		// included in the map alloc hint. This assumes the keys are
		// distinct; duplicate keys would make this an overcount, which
		// merely wastes a little space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return at the end: every case above returns for itself.
}
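
// Illustrative sketch (not from the original source): for a condition such as
//
//	ok := a() && b()
//
// the OANDAND case above conceptually produces
//
//	r := a()
//	if r {
//		r = b()
//	}
//	ok := r
//
// so that b's side effects stay guarded by the short-circuit.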

// as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
// The caller should order the right-hand side of the assignment before calling as2func.
// It rewrites,
//
//	a, b, a = ...
//
// as
//
//	tmp1, tmp2, tmp3 = ...
//	a, b, a = tmp1, tmp2, tmp3
//
// This is necessary to ensure left-to-right assignment order.
func (o *orderState) as2func(n *ir.AssignListStmt) {
	results := n.Rhs[0].Type()
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
	for i, nl := range n.Lhs {
		if !ir.IsBlank(nl) {
			typ := results.Field(i).Type
			tmp := o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}

// as2ok orders OAS2XXX with ok. Just like as2func, it introduces temporaries
// to ensure left-to-right assignment.
func (o *orderState) as2ok(n *ir.AssignListStmt) {
	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)

	do := func(i int, typ *types.Type) {
		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
			n.Lhs[i] = tmp
			as.Lhs = append(as.Lhs, nl)
			if i == 1 {
				// The "ok" result is an untyped boolean according to the Go
				// spec; convert it explicitly to the destination's type,
				// which may be a defined boolean type.
				tmp = typecheck.Conv(tmp, nl.Type())
			}
			as.Rhs = append(as.Rhs, tmp)
		}
	}

	do(0, n.Rhs[0].Type())
	do(1, types.Types[types.TBOOL])

	o.out = append(o.out, n)
	o.stmt(typecheck.Stmt(as))
}