// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// The levels are:
//
//	-live (aka -live=1): print liveness lists as code warnings at safe points
//	-live=2: print an assembly listing with liveness annotations
//
// Each level includes the earlier output as well.
15 package liveness
16
17 import (
18 "cmp"
19 "fmt"
20 "os"
21 "slices"
22 "sort"
23 "strings"
24
25 "cmd/compile/internal/abi"
26 "cmd/compile/internal/base"
27 "cmd/compile/internal/bitvec"
28 "cmd/compile/internal/ir"
29 "cmd/compile/internal/objw"
30 "cmd/compile/internal/reflectdata"
31 "cmd/compile/internal/ssa"
32 "cmd/compile/internal/typebits"
33 "cmd/compile/internal/types"
34 "cmd/internal/hash"
35 "cmd/internal/obj"
36 "cmd/internal/src"
37
38 rtabi "internal/abi"
39 )
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
// blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during the setup pass (prologue):
	//
	//	uevar: upward-exposed variables; used before they are defined in the block
	//	varkill: killed variables; defined (written) in the block
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during the solve (dataflow) pass:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}
104
105
// A collection of global state used by liveness analysis for a
// single function.
type liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name         // the variables being tracked (see getvariables)
	idx        map[*ir.Name]int32 // index of each tracked variable in vars
	stkptrsize int64              // size of the pointer-containing portion of the locals area

	be []blockEffects // per-block effect sets, indexed by block ID

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec
	// unsafeBlocks bit i is set if the end of Block ID i is an
	// unsafe-point. Only valid if !allUnsafe.
	unsafeBlocks bitvec.BitVec

	// An array with a bit vector for each safe point in the
	// current Block during liveness.epilogue. Indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap.
	livevars []bitvec.BitVec

	// livenessMap maps each safe point (and unsafe point) to its
	// liveness information; stackMapSet deduplicates the bitmaps and
	// stackMaps holds the final deduplicated set.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes arguments that may be partially live:
	// register-passed (not address-taken) parameters larger than a
	// pointer. Populated lazily by valueEffects.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool // insert code to clobber dead variables (-clobberdead)
	noClobberArgs bool // do not clobber function arguments when clobbering

	// treat a write to a multi-word ("fat") variable as also using it,
	// so the variable stays live across the write. Enabled by callers
	// that need a more conservative analysis.
	conservativeWrites bool
}
154
155
156
157
// Map maps from *ssa.Value IDs (and *ssa.Block IDs) to the liveness
// information computed for them.
type Map struct {
	Vals         map[ssa.ID]objw.StackMapIndex // stack map index per safe-point value
	UnsafeVals   map[ssa.ID]bool               // values that are unsafe-points
	UnsafeBlocks map[ssa.ID]bool               // blocks whose end is an unsafe-point
	// The set of live, pointer-containing variables at the DeferReturn
	// call (only set when open-coded defers are used).
	DeferReturn objw.StackMapIndex
}
166
// reset prepares m for reuse: the underlying maps are allocated on
// first use and emptied (retaining capacity) thereafter.
func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.StackMapIndex)
		m.UnsafeVals = make(map[ssa.ID]bool)
		m.UnsafeBlocks = make(map[ssa.ID]bool)
	} else {
		clear(m.Vals)
		clear(m.UnsafeVals)
		clear(m.UnsafeBlocks)
	}
	m.DeferReturn = objw.StackMapDontCare
}
179
// set records the stack map index for safe-point value v.
func (m *Map) set(v *ssa.Value, i objw.StackMapIndex) {
	m.Vals[v.ID] = i
}
// setUnsafeVal marks value v as an unsafe-point.
func (m *Map) setUnsafeVal(v *ssa.Value) {
	m.UnsafeVals[v.ID] = true
}
// setUnsafeBlock marks the end of block b as an unsafe-point.
func (m *Map) setUnsafeBlock(b *ssa.Block) {
	m.UnsafeBlocks[b.ID] = true
}
189
// Get returns the stack map index recorded for v, or StackMapDontCare
// if v is not a safe point.
func (m Map) Get(v *ssa.Value) objw.StackMapIndex {
	// If v isn't in the map, then it's a "don't care".
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.StackMapDontCare
}
// GetUnsafe reports whether v is an unsafe-point.
func (m Map) GetUnsafe(v *ssa.Value) bool {
	// default is safe
	return m.UnsafeVals[v.ID]
}
// GetUnsafeBlock reports whether the end of b is an unsafe-point.
func (m Map) GetUnsafeBlock(b *ssa.Block) bool {
	// default is safe
	return m.UnsafeBlocks[b.ID]
}
205
// progeffectscache caches the variable-index lists consumed when
// computing the liveout sets of return and tail-call blocks
// (see initcache and solve).
type progeffectscache struct {
	retuevar    []int32 // indexes of PPARAMOUT variables, live at returns
	tailuevar   []int32 // indexes of PPARAM variables, live at tail calls
	initialized bool
}
211
212
213
214
215
216
217
// shouldTrack reports whether the liveness analysis should track n.
// Only pointer-containing variables are tracked, and only if they are
// non-escaping stack locals (PAUTO) or parameters/results.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
221
222
223
224 func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
225 var vars []*ir.Name
226 for _, n := range fn.Dcl {
227 if shouldTrack(n) {
228 vars = append(vars, n)
229 }
230 }
231 idx := make(map[*ir.Name]int32, len(vars))
232 for i, n := range vars {
233 idx[n] = int32(i)
234 }
235 return vars, idx
236 }
237
// initcache precomputes, once per function, the lists of tracked
// variable indexes that are live at returns (retuevar) and at tail
// calls (tailuevar).
func (lv *liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A tail call (BlockRetJmp) jumps to another function
			// reusing the caller's argument area, so every input
			// parameter must be treated as read (live) there.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// Note that this point is after escaping return values
			// are copied back to the stack using their PPARAMOUT.
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}
264
265
266
267
268
269
270
271
272
273
// A liveEffect is a set of flags describing an instruction's effect on
// a variable: uevar means the instruction reads the variable (a use),
// varkill means it overwrites the variable (killing any earlier value).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)
280
281
282
283
// valueEffects returns the index of a tracked variable affected by v
// and the effect (uevar/varkill) v has on it. It returns -1, 0 if v
// does not affect any tracked variable.
func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// Liveness markers on a variable that is not marked as used have
	// no effect; the variable may have been dropped from the frame
	// while markers referencing it remain.
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken can be
		// partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer must be able
	// to see all the values (including initialization) written so far.
	// This also prevents a variable from "coming back from the dead" and
	// presenting stale pointers to the garbage collector.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 {
		// A write to a single-word variable (or an explicit VarDef)
		// kills it; a partial write to a fat variable does not, and
		// under conservativeWrites even counts as a use.
		if !isfat(n.Type()) || v.Op == ssa.OpVarDef {
			effect |= varkill
		} else if lv.conservativeWrites {
			effect |= uevar
		}
	}

	if effect == 0 {
		return -1, 0
	}

	// Only report effects on variables the analysis tracks.
	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}
333
334
// affectedVar returns the *ir.Name affected by v, along with the kind
// of effect (read/write/address-taken), or nil, 0 if v does not touch
// a named stack variable.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		// Filling a register from a spill slot reads the slot.
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		// Spilling a register writes the slot.
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// An in-register argument is treated as a read of its
		// corresponding argument stack slot, which keeps the slot
		// live at function entry.
		// NOTE(review): the deliberate omission of OpArgFloatReg here
		// presumably reflects that only pointer-carrying (integer)
		// registers matter for liveness — confirm against upstream.
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		// Explicit liveness marker: counts as a use.
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef:
		// Explicit definition marker: counts as a write/kill.
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	// General case: consult the op's declared symbol effect.
	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}
386
// livenessFuncCache holds allocations that newliveness recycles across
// functions compiled with the same ssa.Func cache, reducing garbage.
type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}
391
392
393
394
// newliveness constructs the liveness state for fn/f, recycling large
// allocations (block effects and liveness maps) from f's cache when
// possible, and precomputes unsafe-points and clobbering policy.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *liveness {
	lv := &liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused across functions compiled with the same cache.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{
			Vals:         lc.livenessMap.Vals,
			UnsafeVals:   lc.livenessMap.UnsafeVals,
			UnsafeBlocks: lc.livenessMap.UnsafeBlocks,
			DeferReturn:  objw.StackMapDontCare,
		}
		// Take ownership of the cached maps so they cannot be
		// handed out twice.
		lc.livenessMap.Vals = nil
		lc.livenessMap.UnsafeVals = nil
		lc.livenessMap.UnsafeBlocks = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	// Allocate all four per-block bit vectors from one bulk allocation.
	// (Capacity is 7 vectors per block; only 4 are consumed here.)
	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*7, fn.Pos())
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}
449
// blockEffects returns the blockEffects record for block b.
func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}
453
454
455
456
// pointerMap generates the pointer bitmaps for the set of live
// variables in liveout: stack-resident parameters set bits in args;
// register-resident results and autos set bits in locals (biased by
// stkptrsize, since autos have negative frame offsets).
func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	var slotsSeen map[int64]*ir.Name
	checkForDuplicateSlots := base.Debug.MergeLocals != 0
	if checkForDuplicateSlots {
		slotsSeen = make(map[int64]*ir.Name)
	}
	// Iterate over the set bits of liveout.
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.SetNoCheck(node.Type(), node.FrameOffset(), args)
				break
			}
			// In-register results use spill slots in the locals area,
			// so handle them like autos.
			fallthrough
		case ir.PAUTO:
			if checkForDuplicateSlots {
				// With stack slot merging enabled, two distinct live
				// variables sharing one frame slot would be a bug.
				if prev, ok := slotsSeen[node.FrameOffset()]; ok {
					base.FatalfAt(node.Pos(), "two vars live at pointerMap generation: %q and %q", prev.Sym().Name, node.Sym().Name)
				}
				slotsSeen[node.FrameOffset()] = node
			}
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}
490
491
492
// IsUnsafe reports whether every point in f must be treated as an
// unsafe-point (no preemption allowed anywhere). This is the case for
// all code compiled as part of the runtime and for nosplit functions.
func IsUnsafe(f *ssa.Func) bool {
	// NOTE(review): the rationale — runtime code and nosplit functions
	// cannot tolerate asynchronous preemption — is inherited from the
	// callers of this predicate; here we only test the two flags.
	return base.Flag.CompilingRuntime || f.NoSplit
}
505
506
// markUnsafePoints computes lv.unsafePoints and lv.unsafeBlocks (or
// sets lv.allUnsafe for wholly-unsafe functions): the values and block
// ends at which preemption must not occur. In addition to ops that
// declare themselves unsafe, the entire write-barrier diamond — from
// the flag load through the WBend marker — is marked unsafe.
func (lv *liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))
	lv.unsafeBlocks = bitvec.New(int32(lv.f.NumBlocks()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Mark write barrier unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op != ssa.OpWBend {
				continue
			}
			// The write barrier pattern is a diamond: a decision
			// block branches on the write-barrier-enabled flag to
			// two blocks that rejoin at b, where a memory Phi merges
			// the two paths and the WBend marker ends the sequence.

			// Walk back through b's memory chain to find that Phi;
			// it must be in the same block.
			m := v
			for {
				m = m.MemoryArg()
				if m.Block != b {
					lv.f.Fatalf("can't find Phi before write barrier end mark %v", v)
				}
				if m.Op == ssa.OpPhi {
					break
				}
			}

			if len(m.Args) != 2 {
				lv.f.Fatalf("phi before write barrier end mark has %d args, want 2", len(m.Args))
			}
			c := b.Preds[0].Block()
			d := b.Preds[1].Block()

			// Find the decision block: either one arm feeds through
			// the other, or both arms share a single predecessor.
			var decisionBlock *ssa.Block
			if len(c.Preds) == 1 && c.Preds[0].Block() == d {
				decisionBlock = d
			} else if len(d.Preds) == 1 && d.Preds[0].Block() == c {
				decisionBlock = c
			} else if len(c.Preds) == 1 && len(d.Preds) == 1 && c.Preds[0].Block() == d.Preds[0].Block() {
				decisionBlock = c.Preds[0].Block()
			} else {
				lv.f.Fatalf("can't find write barrier pattern %v", v)
			}
			if len(decisionBlock.Succs) != 2 {
				lv.f.Fatalf("common predecessor block the wrong type %s", decisionBlock.Kind)
			}

			// Flow backwards from the decision block's control value
			// to find the load of the write barrier flag: a value
			// with a memory arg whose Aux (or whose address arg's
			// Aux) is the WriteBarrier global.
			var load *ssa.Value
			v := decisionBlock.Controls[0]
			for {
				if v.MemoryArg() != nil {
					// Single instruction to load (and possibly compare) the flag.
					if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					// Some architectures materialize the address separately.
					if sym, ok := v.Args[0].Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					v.Fatalf("load of write barrier flag not from correct global: %s", v.LongString())
				}
				// Common case: flow backwards through a unary op
				// (or a two-arg op whose args coincide, e.g. a
				// self-compare used as a zero test).
				if len(v.Args) == 1 || len(v.Args) == 2 && v.Args[0] == v.Args[1] {
					v = v.Args[0]
					continue
				}
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}

			// Mark everything after the flag load in the decision
			// block, plus the decision block's end, as unsafe.
			found := false
			for _, v := range decisionBlock.Values {
				if found {
					lv.unsafePoints.Set(int32(v.ID))
				}
				found = found || v == load
			}
			lv.unsafeBlocks.Set(int32(decisionBlock.ID))

			// Mark both arms of the diamond as unsafe.
			for _, e := range decisionBlock.Succs {
				x := e.Block()
				if x == b {
					continue
				}
				for _, v := range x.Values {
					lv.unsafePoints.Set(int32(v.ID))
				}
				lv.unsafeBlocks.Set(int32(x.ID))
			}

			// Mark the values in the join block up to (but not
			// including) the WBend marker as unsafe.
			for _, v := range b.Values {
				if v.Op == ssa.OpWBend {
					break
				}
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}
}
644
645
646
647
648
649
// hasStackMap reports whether value v needs a stack map. Only calls
// need stack maps, and even then the write-barrier helper calls
// (wbZero/wbMove) are excluded — they are part of the write barrier
// sequence handled by markUnsafePoints.
func (lv *liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
		return false
	}
	return true
}
662
663
664
665
// prologue performs one-time setup: it computes the per-block uevar
// and varkill effect sets by scanning each block's values once.
func (lv *liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with each value's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			// A kill at this point hides any use that follows it in
			// program order (i.e. one already seen in this backward
			// walk), so the variable is not upward-exposed.
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}
686
687
// solve iterates the backward dataflow equations to a fixed point,
// filling in every block's livein/liveout from the uevar/varkill sets
// computed by prologue.
func (lv *liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder; for a backward problem this improves
	// convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks until no liveout set changes. A work
	// queue might converge slightly faster, but the iteration count is
	// low enough that the simplicity wins.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				// Results are live at an ordinary return.
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				// Parameters are live at a tail call.
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic, etc. — nothing is live at exit.
			default:
				// A variable is live on exit from this block
				// if it is live on entry to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on entry to this block
			// if it is used by this block, or live on exit
			// from this block and not killed by this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
745
746
747
// epilogue generates the liveness bitmaps for every safe point in the
// function, then compacts them into the deduplicated stack map set and
// records the per-value indexes (via compact).
func (lv *liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars) // variables forced live when defers are present

	// If there are defers, certain variables must stay live at every
	// safe point: a deferred function can run after a panic at
	// (almost) any point and may observe results and defer slots.
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid: heap addrs should be autos.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// All stack-resident results are live whenever a
				// defer can run.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// The address slot of a heap-allocated result must be
				// live (and zeroed at entry, so a GC before it is
				// initialized doesn't see junk).
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// An open-coded defer arg slot must be live
				// everywhere in a function, since a panic can occur
				// (almost) anywhere. Because it is live everywhere,
				// it must be zeroed on entry.
				livedefer.Set(int32(i))
				// It should already be marked as needing zeroing.
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first; the runtime assumes
	// the function entry map is index 0.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer) // only for non-entry safe points
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live at entry.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue // ok
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables for the entry map (index 0).
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If open-coded defers are in use, record a liveness map for the
	// deferreturn point; otherwise mark it "don't care".
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.StackMapDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.StackMapIndex(idx)
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
// compact adds block b's per-safe-point bitmaps (lv.livevars) to the
// deduplicated stack map set, recording each value's resulting index
// in lv.livenessMap, and records unsafe-point information for b's
// values and for b itself. Afterwards it truncates lv.livevars for
// reuse by the next block.
//
// Many safe points share identical liveness maps, so deduplication
// saves considerable space; values store only the shared index.
func (lv *liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map (livevars[0]).
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		if lv.hasStackMap(v) {
			idx, _ := lv.stackMapSet.add(lv.livevars[pos])
			pos++
			lv.livenessMap.set(v, objw.StackMapIndex(idx))
		}
		// OpClobber values are never marked unsafe individually.
		if lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID)) {
			lv.livenessMap.setUnsafeVal(v)
		}
	}
	if lv.allUnsafe || lv.unsafeBlocks.Get(int32(b.ID)) {
		lv.livenessMap.setUnsafeBlock(b)
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}
941
// enableClobber decides whether to run the clobberdead experiment for
// this function: overwriting dead pointer slots at safe points to
// flush out liveness bugs. Controlled by -clobberdead (and optionally
// restricted by GOCLOBBERDEADHASH).
func (lv *liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild is a special case: clobbering its frame
		// interacts badly with fork semantics, so skip it entirely.
		// NOTE(review): the precise vfork-related rationale lives in
		// the syscall package — confirm there before changing.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// These functions pass pointer values in places the liveness
		// analysis cannot see (write barrier buffer, reflect call
		// frames), so their arguments must not be clobbered.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name
		// matches a pattern. Useful for binary searching for a
		// miscompiled function.
		hstr := ""
		for _, b := range hash.Sum20([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}
994
995
996
// clobber inserts code to clobber pointer slots in all the dead
// variables (locals and args) at every synchronous safepoint in b,
// rewriting b's value schedule in place.
func (lv *liveness) clobber(b *ssa.Block) {
	// Copy block's values to a temporary and rebuild the schedule.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Skip argless ops. We need to skip at least
			// the lowered ClosurePtr op, because it
			// really wants to be first. This will also
			// skip ops like InitMem and SP, which are ok.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into the schedule, adding clobbering before each
	// safe point (indices parallel those assigned in epilogue).
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}
1028
1029
1030
1031
// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the
// end of b.Values.
func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		// Skip variables whose liveness is tracked dynamically or
		// that must stay intact for defers:
		// address-taken, open-coded defer slots, heap result addrs.
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Also skip arguments entirely when noClobberArgs is set
			// (see enableClobber).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}
1046
1047
1048
// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}
1052
1053
1054
1055
1056
// clobberWalk recursively clobbers every pointer word inside the
// sub-portion of variable v at the given byte offset, whose type is t.
// Clobbering instructions are appended to b.Values.
//
//	b      = block to which we append instructions
//	v      = variable
//	offset = offset of (sub-portion of) variable to clobber (in bytes)
//	t      = type of sub-portion of v
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len; } — only the data pointer.
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data; } (or type/data for eface):
		// both words are pointers.
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap; } — only the array pointer.
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}
1098
1099
1100
// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}
1104
// showlive emits a compiler diagnostic listing the variables live at v
// (or at function entry when v is nil), implementing the -live output.
// Uninteresting cases — init, dot-prefixed and wrapper functions,
// non-call values, and empty live sets — are skipped.
func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for compiler-generated wrappers.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at calls.
		// Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Strip the package prefix from the callee name.
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	// Sort variable names for display. Variables aren't in any
	// particular order, and the order can vary by architecture.
	var names []string
	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			names = append(names, n.Sym().Name)
		}
	}
	sort.Strings(names)
	for _, v := range names {
		s += " " + v
	}

	base.WarnfAt(pos, "%s", s)
}
1155
1156 func (lv *liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
1157 if live.IsEmpty() {
1158 return printed
1159 }
1160
1161 if !printed {
1162 fmt.Printf("\t")
1163 } else {
1164 fmt.Printf(" ")
1165 }
1166 fmt.Printf("%s=", name)
1167
1168 comma := ""
1169 for i, n := range lv.vars {
1170 if !live.Get(int32(i)) {
1171 continue
1172 }
1173 fmt.Printf("%s%s", comma, n.Sym().Name)
1174 comma = ","
1175 }
1176 return true
1177 }
1178
1179
1180 func (lv *liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
1181 if !x {
1182 return printed
1183 }
1184 if !printed {
1185 fmt.Printf("\t")
1186 } else {
1187 fmt.Printf(" ")
1188 }
1189 fmt.Printf("%s=", name)
1190 if x {
1191 fmt.Printf("%s", lv.vars[pos].Sym().Name)
1192 }
1193
1194 return true
1195 }
1196
1197
1198
1199
1200 func (lv *liveness) printDebug() {
1201 fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))
1202
1203 for i, b := range lv.f.Blocks {
1204 if i > 0 {
1205 fmt.Printf("\n")
1206 }
1207
1208
1209 fmt.Printf("bb#%d pred=", b.ID)
1210 for j, pred := range b.Preds {
1211 if j > 0 {
1212 fmt.Printf(",")
1213 }
1214 fmt.Printf("%d", pred.Block().ID)
1215 }
1216 fmt.Printf(" succ=")
1217 for j, succ := range b.Succs {
1218 if j > 0 {
1219 fmt.Printf(",")
1220 }
1221 fmt.Printf("%d", succ.Block().ID)
1222 }
1223 fmt.Printf("\n")
1224
1225 be := lv.blockEffects(b)
1226
1227
1228 printed := false
1229 printed = lv.printbvec(printed, "uevar", be.uevar)
1230 printed = lv.printbvec(printed, "livein", be.livein)
1231 if printed {
1232 fmt.Printf("\n")
1233 }
1234
1235
1236
1237 if b == lv.f.Entry {
1238 live := lv.stackMaps[0]
1239 fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
1240 fmt.Printf("\tlive=")
1241 printed = false
1242 for j, n := range lv.vars {
1243 if !live.Get(int32(j)) {
1244 continue
1245 }
1246 if printed {
1247 fmt.Printf(",")
1248 }
1249 fmt.Printf("%v", n)
1250 printed = true
1251 }
1252 fmt.Printf("\n")
1253 }
1254
1255 for _, v := range b.Values {
1256 fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())
1257
1258 pcdata := lv.livenessMap.Get(v)
1259
1260 pos, effect := lv.valueEffects(v)
1261 printed = false
1262 printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
1263 printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
1264 if printed {
1265 fmt.Printf("\n")
1266 }
1267
1268 if pcdata.StackMapValid() {
1269 fmt.Printf("\tlive=")
1270 printed = false
1271 if pcdata.StackMapValid() {
1272 live := lv.stackMaps[pcdata]
1273 for j, n := range lv.vars {
1274 if !live.Get(int32(j)) {
1275 continue
1276 }
1277 if printed {
1278 fmt.Printf(",")
1279 }
1280 fmt.Printf("%v", n)
1281 printed = true
1282 }
1283 }
1284 fmt.Printf("\n")
1285 }
1286
1287 if lv.livenessMap.GetUnsafe(v) {
1288 fmt.Printf("\tunsafe-point\n")
1289 }
1290 }
1291 if lv.livenessMap.GetUnsafeBlock(b) {
1292 fmt.Printf("\tunsafe-block\n")
1293 }
1294
1295
1296 fmt.Printf("end\n")
1297 printed = false
1298 printed = lv.printbvec(printed, "varkill", be.varkill)
1299 printed = lv.printbvec(printed, "liveout", be.liveout)
1300 if printed {
1301 fmt.Printf("\n")
1302 }
1303 }
1304
1305 fmt.Printf("\n")
1306 }
1307
1308
1309
1310
1311
// emit generates the pointer-bitmap funcdata symbols for the function:
// argsSym covers the stack argument area, liveSym covers the
// pointer-containing locals area. Each symbol encodes the number of
// bitmaps, the bitmap length in words, and one bitmap per stack map.
func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size the args bitmap to just cover the stack-resident parameter
	// with the highest frame offset. (Pointer-free params aren't in
	// lv.vars; see shouldTrack.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset just past the last pointer in that node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// The locals bitmap covers exactly the pointer-containing prefix
	// of the locals area.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// Give the symbols content-derived names so identical bitmaps are
	// deduplicated across functions.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}
1367
1368
1369
1370
1371
1372
// Compute runs the liveness analysis for curfn/f: it builds the
// dataflow solution, emits the GC funcdata (args/locals pointer maps
// and stack objects) via pp, and returns the per-value liveness Map
// together with the set of partially-live register arguments.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) (Map, map[*ir.Name]bool) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Update the function cache, but only for reasonably-sized
	// allocations (2000 is the cutoff for both slices and maps).
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 {
			for i := range lv.be {
				lv.be[i] = blockEffects{}
			}
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures.
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	return lv.livenessMap, lv.partLiveArgs
}
1437
// emitStackObjects constructs the stack-objects funcdata symbol
// describing each address-taken, non-escaping tracked variable, so the
// runtime can find and scan live stack objects. It returns nil when
// the function has no stack objects.
func (lv *liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	slices.SortFunc(vars, func(a, b *ir.Name) int { return cmp.Compare(a.FrameOffset(), b.FrameOffset()) })

	// Populate the stack object data.
	// Layout must be kept in sync with the runtime's stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Note: arguments and return values have non-negative frame
		// offsets (relative to argp); locals have negative offsets
		// (relative to varp). Both must fit in 32 bits.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, useGCProg, ptrdata := reflectdata.GCSym(t)
		if useGCProg {
			// A negative ptrdata signals that the GC data is encoded
			// as a GC program rather than a bitmap.
			ptrdata = -ptrdata
		}
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrdata))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507 func isfat(t *types.Type) bool {
1508 if t != nil {
1509 switch t.Kind() {
1510 case types.TSLICE, types.TSTRING,
1511 types.TINTER:
1512 return true
1513 case types.TARRAY:
1514
1515 if t.NumElem() == 1 {
1516 return isfat(t.Elem())
1517 }
1518 return true
1519 case types.TSTRUCT:
1520
1521 if t.NumFields() == 1 {
1522 return isfat(t.Field(0).Type)
1523 }
1524 return true
1525 }
1526 }
1527
1528 return false
1529 }
1530
1531
1532
1533
// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// stack-resident inputs (and outputs, if any) as the content of the
// symbol <fn>.args_stackmap, for use by assembly implementations that
// have no liveness information of their own. When there are results,
// two bitmaps are written: inputs-only (entry) and inputs+outputs
// (exit).
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" {
		return
	}
	// One bit per pointer-sized word of the argument area.
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr))

	for _, p := range abiInfo.InParams() {
		typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}
	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	// The symbol is referenced from a different package's assembly.
	lsym.Set(obj.AttrLinkname, true)
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		// Add the stack-resident results on top of the input bits.
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}
1566
// (scrape artifact: "View as plain text" source-viewer link removed)