// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// The levels are:
//
//	-live (aka -live=1): print liveness lists as code warnings at safe points
//	-live=2: print an assembly listing with liveness annotations
//
// Each level includes the earlier output as well.

package liveness

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/typebits"
	"cmd/compile/internal/types"
	"cmd/internal/notsha256"
	"cmd/internal/obj"
	"cmd/internal/src"

	rtabi "internal/abi"
)

// OpVarDef is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can see initialization of single-word
// variables quite precisely, OpVarDef is only needed for multi-word
// variables satisfying isfat(n.Type()). For simplicity though, buildssa
// emits OpVarDef regardless of variable width.
//
// An 'OpVarDef x' annotation in the instruction stream tells the liveness
// analysis to behave as though the variable x is being initialized at that
// point in the instruction stream. The OpVarDef must appear before the
// actual (multi-instruction) initialization, and it must also appear after
// any uses of the previous value, if any. For example, if compiling:
//
//	x = x[1:]
//
// it is important to generate code like:
//
//	base, len, cap = pieces of x[1:]
//	OpVarDef x
//	x = {base, len, cap}
//
// If instead the generated code looked like:
//
//	OpVarDef x
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//
// then the liveness analysis would decide the previous value of x was
// unnecessary even though it is about to be used by the x[1:] computation.
// Similarly, if the generated code looked like:
//
//	base, len, cap = pieces of x[1:]
//	x = {base, len, cap}
//	OpVarDef x
//
// then the liveness analysis will not preserve the new value of x, because
// the OpVarDef appears to have "overwritten" it.
//
// OpVarDef is a bit of a kludge to work around the fact that the
// instruction stream is working on single-word values but the liveness
// analysis thinks in terms of variables, which might be multiple words.
// blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during liveness.prologue using only the content of
	// individual blocks:
	//
	//	uevar: upward exposed variables (used before set in block)
	//	varkill: killed variables (set in block)
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during liveness.solve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}

// A collection of global state used by liveness analysis.
type liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name
	idx        map[*ir.Name]int32
	stkptrsize int64

	be []blockEffects

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec
	// unsafeBlocks bit i is set if Block ID i is an unsafe-point
	// (preemption is not allowed on any end-of-block safe-points).
	// Only valid if !allUnsafe.
	unsafeBlocks bitvec.BitVec

	// An array with a bit vector for each safe point in the
	// current Block during liveness.epilogue. Indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap. liveness.compact moves
	// these to stackMaps.
	livevars []bitvec.BitVec

	// livenessMap maps from safe points (i.e., CALLs) to their
	// liveness map indexes.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes input arguments (PPARAM) that may
	// be partially live. That is, an argument is considered live
	// because a part of it is used, but we may not initialize all
	// of its parts.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool // Whether to clobber dead stack slots in this function.
	noClobberArgs bool // Do not clobber function arguments.

	// conservativeWrites makes the analysis treat a write to a fat
	// variable that lacks a preceding VarDef as a read as well (the
	// variable stays live) rather than ignoring it, for callers that
	// need a conservative over-approximation of liveness.
	conservativeWrites bool
}

// Map maps from *ssa.Value to StackMapIndex.
// Also keeps track of unsafe ssa.Values and ssa.Blocks.
// (unsafe = can't be interrupted during GC.)
type Map struct {
	Vals         map[ssa.ID]objw.StackMapIndex
	UnsafeVals   map[ssa.ID]bool
	UnsafeBlocks map[ssa.ID]bool
	// The set of live, pointer-containing variables at the deferreturn
	// call (only set when open-coded defers are used).
	DeferReturn objw.StackMapIndex
}

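// reset clears the map and readies it for reuse. The underlying Go maps
// are allocated on first use and cleared (rather than reallocated) on
// subsequent calls, since reset runs once per compiled function.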
func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.StackMapIndex)
		m.UnsafeVals = make(map[ssa.ID]bool)
		m.UnsafeBlocks = make(map[ssa.ID]bool)
	} else {
		for k := range m.Vals {
			delete(m.Vals, k)
		}
		for k := range m.UnsafeVals {
			delete(m.UnsafeVals, k)
		}
		for k := range m.UnsafeBlocks {
			delete(m.UnsafeBlocks, k)
		}
	}
	m.DeferReturn = objw.StackMapDontCare
}

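// set records the stack map index chosen for the safe point v.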
func (m *Map) set(v *ssa.Value, i objw.StackMapIndex) {
	m.Vals[v.ID] = i
}
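
// setUnsafeVal marks v as an unsafe point.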
func (m *Map) setUnsafeVal(v *ssa.Value) {
	m.UnsafeVals[v.ID] = true
}
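
// setUnsafeBlock marks block b as unsafe (preemption is not allowed
// at its end-of-block safe points).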
func (m *Map) setUnsafeBlock(b *ssa.Block) {
	m.UnsafeBlocks[b.ID] = true
}

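// Get returns the stack map index recorded for the safe point v,
// or objw.StackMapDontCare if v is not a safe point.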
func (m Map) Get(v *ssa.Value) objw.StackMapIndex {
	// If v isn't in the map, it has no stack map, which we signal
	// with StackMapDontCare.
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.StackMapDontCare
}
func (m Map) GetUnsafe(v *ssa.Value) bool {
	// default is safe
	return m.UnsafeVals[v.ID]
}
func (m Map) GetUnsafeBlock(b *ssa.Block) bool {
	// default is safe
	return m.UnsafeBlocks[b.ID]
}

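// progeffectscache caches the indices of the tracked variables that are
// implicitly read at returns (result parameters) and at tail calls
// (input parameters).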
type progeffectscache struct {
	retuevar    []int32
	tailuevar   []int32
	initialized bool
}

// shouldTrack reports whether the liveness analysis
// should track the variable n.
// We don't care about variables that have no pointers,
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about variables that have escaped to the heap.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}

// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *ir.Name.
func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
	var vars []*ir.Name
	for _, n := range fn.Dcl {
		if shouldTrack(n) {
			vars = append(vars, n)
		}
	}
	idx := make(map[*ir.Name]int32, len(vars))
	for i, n := range vars {
		idx[n] = int32(i)
	}
	return vars, idx
}

func (lv *liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A tail call (BlockRetJmp) pops the frame and jumps to
			// another function that reuses the same argument area, so
			// all input parameters must be treated as read (live) at
			// a tail call.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// Note that this point is after escaping return values
			// are copied back to the heap using their PAUTOHEAP references.
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's
// liveness effects on a variable.
//
// The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction (set)
//
// A kill happens after the use (for an instruction that updates a value, for example).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)

// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Func.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken can be
		// partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer must be able
	// to see all the values (including initialization) written so far.
	// This also prevents a variable from "coming back from the dead" and presenting
	// stale pointers to the garbage collector. See issue 28445.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 {
		if !isfat(n.Type()) || v.Op == ssa.OpVarDef {
			effect |= varkill
		} else if lv.conservativeWrites {
			effect |= uevar
		}
	}

	if effect == 0 {
		return -1, 0
	}

	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}

// affectedVar returns the *ir.Name node affected by v.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// OpArgIntReg reads an argument that arrived in an integer
		// register. Reporting it as a read of the argument's stack
		// home makes the argument live from function entry, even if
		// only parts of it are ever spilled or used. (Float registers
		// never hold pointers, so OpArgFloatReg needs no such
		// handling.)
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef:
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}

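// livenessFuncCache keeps liveness-related allocations (the block-effects
// slice and the liveness map) in the ssa.Func cache so they can be reused
// when compiling the next function.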
type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}

// newliveness constructs the liveness structure that holds the global
// state of the liveness computation for a single function.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *liveness {
	lv := &liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused. Surprisingly, the bit vectors themselves aren't
	// a major source of allocation, but the liveness maps are.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{
			Vals:         lc.livenessMap.Vals,
			UnsafeVals:   lc.livenessMap.UnsafeVals,
			UnsafeBlocks: lc.livenessMap.UnsafeBlocks,
			DeferReturn:  objw.StackMapDontCare,
		}
		lc.livenessMap.Vals = nil
		lc.livenessMap.UnsafeVals = nil
		lc.livenessMap.UnsafeBlocks = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*7)
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}

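// blockEffects returns the blockEffects record for block b.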
func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}

// pointerMap generates the args and locals pointer bitmaps for one stack
// map, given the set of live variables in liveout: bits are set in args
// for live stack-resident parameters and results, and in locals for live
// pointer-containing automatics.
func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	var slotsSeen map[int64]*ir.Name
	checkForDuplicateSlots := base.Debug.MergeLocals != 0
	if checkForDuplicateSlots {
		slotsSeen = make(map[int64]*ir.Name)
	}
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.SetNoCheck(node.Type(), node.FrameOffset(), args)
				break
			}
			// Register-resident output params have their stack homes
			// in the locals area, handled like a PAUTO below.
			fallthrough
		case ir.PAUTO:
			if checkForDuplicateSlots {
				if prev, ok := slotsSeen[node.FrameOffset()]; ok {
					base.FatalfAt(node.Pos(), "two vars live at pointerMap generation: %q and %q", prev.Sym().Name, node.Sym().Name)
				}
				slotsSeen[node.FrameOffset()] = node
			}
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}

// IsUnsafe indicates that all points in this function are
// unsafe-points.
func IsUnsafe(f *ssa.Func) bool {
	// The runtime assumes the only safe-points are function
	// prologues (because that's how it used to be). We could and
	// should improve that, but for now keep considering all points
	// in the runtime unsafe. obj will add prologues and their
	// safe-points.
	//
	// go:nosplit functions are similar. Since safe points used to
	// be coupled with stack checks, go:nosplit often actually
	// means "no safe points in this function".
	return base.Flag.CompilingRuntime || f.NoSplit
}

// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
func (lv *liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))
	lv.unsafeBlocks = bitvec.New(int32(lv.f.NumBlocks()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Mark write-barrier regions as unsafe.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op != ssa.OpWBend {
				continue
			}
			// WBend appears at the start of a block, which looks like:
			//
			//	... some mem
			//	if wbEnabled: goto C else D
			//
			//	C:
			//	... some write-barrier store
			//	goto B
			//
			//	D:
			//	... some non-write-barrier store
			//	goto B
			//
			//	B:
			//	m = Phi mem_C mem_D
			//	... = WBend m

			// Walk up the memory chain to find the Phi that merges
			// the two memory states.
			m := v
			for {
				m = m.MemoryArg()
				if m.Block != b {
					lv.f.Fatalf("can't find Phi before write barrier end mark %v", v)
				}
				if m.Op == ssa.OpPhi {
					break
				}
			}
			// Find the two predecessor blocks (write barrier on and write barrier off).
			if len(m.Args) != 2 {
				lv.f.Fatalf("phi before write barrier end mark has %d args, want 2", len(m.Args))
			}
			c := b.Preds[0].Block()
			d := b.Preds[1].Block()

			// Find their common predecessor block (the one that branches based on wb on/off).
			// It might be a diamond pattern, or one of the blocks in the diamond pattern might
			// be missing.
			var decisionBlock *ssa.Block
			if len(c.Preds) == 1 && c.Preds[0].Block() == d {
				decisionBlock = d
			} else if len(d.Preds) == 1 && d.Preds[0].Block() == c {
				decisionBlock = c
			} else if len(c.Preds) == 1 && len(d.Preds) == 1 && c.Preds[0].Block() == d.Preds[0].Block() {
				decisionBlock = c.Preds[0].Block()
			} else {
				lv.f.Fatalf("can't find write barrier pattern %v", v)
			}
			if len(decisionBlock.Succs) != 2 {
				lv.f.Fatalf("common predecessor block the wrong type %s", decisionBlock.Kind)
			}

			// Flow backwards from the control value to find the
			// flag load. We don't know what lowered ops we're
			// looking for, but all current arches produce a
			// single op that does the memory load from the flag
			// address, so we look for that.
			var load *ssa.Value
			v := decisionBlock.Controls[0]
			for {
				if v.MemoryArg() != nil {
					// Single instruction to load (and maybe compare) the write barrier flag.
					if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					// Some architectures have to materialize the address
					// separately from the load.
					if sym, ok := v.Args[0].Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					v.Fatalf("load of write barrier flag not from correct global: %s", v.LongString())
				}
				// Common case: just flow backwards.
				if len(v.Args) == 1 || len(v.Args) == 2 && v.Args[0] == v.Args[1] {
					// Note: 386 lowers Neq32 to (TESTL cond cond).
					v = v.Args[0]
					continue
				}
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}

			// Mark everything after the load unsafe.
			found := false
			for _, v := range decisionBlock.Values {
				if found {
					lv.unsafePoints.Set(int32(v.ID))
				}
				found = found || v == load
			}
			lv.unsafeBlocks.Set(int32(decisionBlock.ID))

			// Mark the write barrier on/off blocks as unsafe.
			for _, e := range decisionBlock.Succs {
				x := e.Block()
				if x == b {
					continue
				}
				for _, v := range x.Values {
					lv.unsafePoints.Set(int32(v.ID))
				}
				lv.unsafeBlocks.Set(int32(x.ID))
			}

			// Mark the values in the join block, up to the WBend, as unsafe.
			for _, v := range b.Values {
				if v.Op == ssa.OpWBend {
					break
				}
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}
}

// hasStackMap reports whether value v needs a stack map.
//
// This does not necessarily mean the instruction is a safe-point. In
// particular, call Values can have a stack map in case the callee
// grows the stack, but not themselves be a safe-point.
func (lv *liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	// Calls to the write-barrier helpers wbZero and wbMove do not
	// need (and must not have) their own stack maps.
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
		return false
	}
	return true
}

// prologue initializes the sets used to solve for the live variables:
// it visits all the instructions in each basic block and summarizes
// each block's effects.
func (lv *liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with each instruction's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}

// Solve the liveness dataflow equations.
func (lv *liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder. This improves convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks in reverse round-robin fashion. A work
	// queue might be slightly faster. As is, the number of iterations is
	// so low that it hardly seems to be worth the complexity.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic exit. Live nothing.
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is used by this block, or live on output from this block
			// and not set by the code in this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}

// epilogue visits all instructions in each basic block and computes a
// bit vector of live variables at each safe point location.
func (lv *liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars) // always-live variables

	// If there is a defer (that could recover), then all output
	// parameters are live all the time. In addition, any locals
	// that are pointers to heap-allocated output parameters are
	// live all the time as well. On the other hand, the stack copy
	// of a result that has moved to the heap is never live: all
	// writes go through the heap copy.
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid. Heap addresses are PAUTOs.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// This variable will be overwritten early in the function
				// prologue (from the result of a mallocgc) but we need to
				// zero it in case that malloc causes a stack scan.
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// An open-coded defer arg slot must be live
				// everywhere in the function, since a panic (and
				// hence the deferred call) can occur almost
				// anywhere. Because it is live everywhere, it
				// must be zeroed on entry.
				livedefer.Set(int32(i))
				// It should already have been marked as needing to be zeroed.
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first. The runtime assumes
	// the function entry map is index 0. Conveniently, layout
	// already ensured that the entry block is first.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer) // only for non-entry safe points
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue // ok
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables.
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If we have an open-coded deferreturn call, make a liveness map for it.
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.StackMapDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.StackMapIndex(idx)
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}

// Compact coalesces identical bitmaps from lv.livevars into the set
// lv.stackMapSet.
//
// Compact clears lv.livevars.
//
// There are actually two lists of bitmaps, one list for the local variables and one
// list for the function arguments. Both lists are indexed by the same PCDATA
// index, so the corresponding pairs must be considered together when
// merging duplicates. The argument bitmaps change much less often during
// function execution than the local variable bitmaps, so it is possible that
// we could introduce a separate PCDATA index for arguments vs locals and
// then compact the set of argument bitmaps separately from the set of
// local variable bitmaps. As of 2014-04-02, doing this to the godoc binary
// is actually a net loss: we save about 50k of argument bitmaps but the
// PCDATA tables cost about 100k. So for now we keep using a single index for
// both bitmap lists.
func (lv *liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map.
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		if lv.hasStackMap(v) {
			idx, _ := lv.stackMapSet.add(lv.livevars[pos])
			pos++
			lv.livenessMap.set(v, objw.StackMapIndex(idx))
		}
		if lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID)) {
			lv.livenessMap.setUnsafeVal(v)
		}
	}
	if lv.allUnsafe || lv.unsafeBlocks.Get(int32(b.ID)) {
		lv.livenessMap.setUnsafeBlock(b)
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}

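// enableClobber decides whether the clobberdead experiment should run for
// this function, setting lv.doClobber and lv.noClobberArgs accordingly.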
func (lv *liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild calls vfork on some platforms.
		// The code we add here clobbers parts of the stack in the child.
		// When the parent resumes, it is using the same stack frame. But the
		// child has clobbered stack variables that the parent needs. Boom!
		// In particular, the sys argument gets clobbered.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// runtime.wbBufFlush must not modify its arguments. See the comments
		// in runtime/mwbbuf.go:wbBufFlush.
		//
		// reflect.callReflect and reflect.callMethod are called from special
		// functions makeFuncStub and methodValueCall. The runtime expects
		// that it can find the first argument (ctxt) at 0(SP) in makeFuncStub
		// and methodValueCall's frame (see runtime/traceback.go:getArgInfo).
		// Normally callReflect and callMethod already do not modify the
		// argument, and keep it alive. But the compiler-generated ABI wrappers
		// don't do that. Clobbering the argument area, and copying the
		// argument to another place in our stack frame, may confuse the
		// runtime.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name matches a pattern.
		// Useful for binary searching for a miscompiled function.
		hstr := ""
		for _, b := range notsha256.Sum256([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}

// clobber inserts code to clobber pointer slots in all the dead variables
// (locals and args) at every synchronous safepoint in b.
func (lv *liveness) clobber(b *ssa.Block) {
	// Copy block's values to a temporary.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Skip argless ops. We need to skip at least
			// the lowered ClosurePtr op, because it
			// really wants to be first in the block. Any
			// other zero-arg ops are also ok to skip.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into schedule, adding clobbering around safepoints.
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}

// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the end
// of b.Values.
func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Don't clobber stack objects (address-taken). They are
			// tracked dynamically.
			// Also don't clobber slots that are live for defers (see
			// the code setting livedefer in epilogue).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}

// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}

// clobberWalk generates code to clobber the pointers in a sub-portion of v.
//
//	b = block to which we append instructions
//	v = variable
//	offset = offset of (sub-portion of) variable to clobber (in bytes)
//	t = type of sub-portion of v
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len; }
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data; }
		// or, when isnilinter(t)==true:
		// struct { Type *type; void *data; }
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap; }
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}

// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}

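// showlive emits a -live diagnostic for the stack map live: at function
// entry when v is nil, otherwise at the call v.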
func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for compiler-generated wrappers.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at
		// calls. Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	// Sort variable names for display. Variables aren't in any particular order, and
	// the order can change by architecture, particularly with differences in regabi.
	var names []string
	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			names = append(names, n.Sym().Name)
		}
	}
	sort.Strings(names)
	for _, v := range names {
		s += " " + v
	}

	base.WarnfAt(pos, s)
}

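// printbvec prints the variables in live as a comma-separated list,
// prefixed by name. It reports whether anything was printed, so callers
// can decide whether a trailing newline is needed.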
func (lv *liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
	if live.IsEmpty() {
		return printed
	}

	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)

	comma := ""
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		fmt.Printf("%s%s", comma, n.Sym().Name)
		comma = ","
	}
	return true
}

// printeffect is like printbvec, but for printing a single variable's effect.
func (lv *liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)
	fmt.Printf("%s", lv.vars[pos].Sym().Name)

	return true
}

// Prints the computed liveness information and inputs, for debugging.
// This format synthesizes the information used during the multiple passes
// into a single presentation.
func (lv *liveness) printDebug() {
	fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))

	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed

		if b == lv.f.Entry {
			live := lv.stackMaps[0]
			fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())

			pcdata := lv.livenessMap.Get(v)

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			if printed {
				fmt.Printf("\n")
			}

			if pcdata.StackMapValid() {
				fmt.Printf("\tlive=")
				printed = false
				live := lv.stackMaps[pcdata]
				for j, n := range lv.vars {
					if !live.Get(int32(j)) {
						continue
					}
					if printed {
						fmt.Printf(",")
					}
					fmt.Printf("%v", n)
					printed = true
				}
				fmt.Printf("\n")
			}

			if lv.livenessMap.GetUnsafe(v) {
				fmt.Printf("\tunsafe-point\n")
			}
		}
		if lv.livenessMap.GetUnsafeBlock(b) {
			fmt.Printf("\tunsafe-block\n")
		}

		// bb bitsets
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

// Dumps a slice of bitmaps to a symbol as a sequence of uint32 values. The
// first word dumped is the total number of bitmaps. The second word is the
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
// remaining bytes are the raw bitmaps.
func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size args bitmaps to be just large enough to hold the largest pointer.
	// First, find the largest Xoffset node we care about.
	// (Nodes without pointers aren't interesting.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset of the largest pointer in the largest node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// Size locals bitmaps to be stkptrsize sized.
	// We cannot shrink them to only hold the largest pointer,
	// because their size is used to calculate the beginning
	// of the local variables frame.
	// Further discussion in https://golang.org/cl/104175.
	// TODO: consider trimming leading zeros.
	// This would require shifting all bitmaps.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// These symbols will be added to Ctxt.Data by addGCLocals
	// after parallel compilation is done.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}

// Compute is the entry point for the liveness analysis. It solves for the
// liveness of pointer variables in the function and emits the runtime data
// structures read by the garbage collector.
// It returns a map from GC safe points to their corresponding stack map
// index, and a map that contains all input parameters that may be
// partially live.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) (Map, map[*ir.Name]bool) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Update the function cache.
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 { // Threshold from ssa.Cache slices.
			for i := range lv.be {
				lv.be[i] = blockEffects{}
			}
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	return lv.livenessMap, lv.partLiveArgs
}

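// emitStackObjects emits a symbol describing the function's stack objects:
// the address-taken, pointer-containing stack variables that the garbage
// collector must track dynamically. It returns nil if there are none.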
func (lv *liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	sort.Slice(vars, func(i, j int) bool { return vars[i].FrameOffset() < vars[j].FrameOffset() })

	// Populate the stack object data.
	// Format must match runtime/stack.go:stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Note: arguments and return values have non-negative Xoffset,
		// in which case the offset is relative to argp.
		// Locals have a negative Xoffset, in which case the offset is
		// relative to varp.
		// We already limit the frame size, so the offset and the size
		// fit in 32-bit values.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, useGCProg, ptrdata := reflectdata.GCSym(t)
		if useGCProg {
			ptrdata = -ptrdata
		}
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrdata))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}

// isfat reports whether a variable of type t needs multiple assignments to initialize.
// For example:
//
//	type T struct{ x, y int }
//	x := T{x: 0, y: 1}
//
// Then the x variable will be initialized with two assignments, one to x.x
// and one to x.y. A variable that needs multiple assignments may be only
// partially initialized at some program point, which is why writes to fat
// variables are treated specially by the liveness analysis (see the uses
// of isfat in valueEffects, together with OpVarDef).
func isfat(t *types.Type) bool {
	if t != nil {
		switch t.Kind() {
		case types.TSLICE, types.TSTRING,
			types.TINTER: // maybe remove later
			return true
		case types.TARRAY:
			// Array of 1 element, check if it's a fat type.
			if t.NumElem() == 1 {
				return isfat(t.Elem())
			}
			return true
		case types.TSTRUCT:
			// Struct with 1 field, check if it's a fat type.
			if t.NumFields() == 1 {
				return isfat(t.Field(0).Type)
			}
			return true
		}
	}

	return false
}

// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// inputs and outputs as the value of symbol <fn>.args_stackmap.
// If fn has outputs, two bitmaps are written, otherwise just one.
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" {
		return
	}
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr))

	for _, p := range abiInfo.InParams() {
		typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}
	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	lsym.Set(obj.AttrLinkname, true)
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}