5 package ssa
6
7 import (
8 "cmd/compile/internal/abi"
9 "cmd/compile/internal/base"
10 "cmd/compile/internal/ir"
11 "cmd/compile/internal/types"
12 "cmd/internal/src"
13 "fmt"
14 )
15
// postExpandCallsDecompose runs the user-type and builtin-type
// decomposition passes after calls have been expanded.
func postExpandCallsDecompose(f *Func) {
	decomposeUser(f)    // decompose user-defined aggregate values
	decomposeBuiltin(f) // decompose builtin compound values
}
20
// expandCalls rewrites the late-expanded call ops (OpStaticLECall,
// OpClosureLECall, OpInterLECall, OpTailLECall) and the OpArg,
// OpSelectN, and OpSelectNAddr values around them into ABI-explicit
// form: aggregate arguments and results are decomposed into
// register-sized pieces placed in their assigned registers or into
// loads/stores at their assigned stack offsets, and each LE call op
// becomes the corresponding final call op whose type lists the ABI
// out-registers followed by memory.
func expandCalls(f *Func) {
	sp, _ := f.spSb()

	x := &expandState{
		f:               f,
		debug:           f.pass.debug,
		regSize:         f.Config.RegSize,
		sp:              sp,
		typs:            &f.Config.Types,
		wideSelects:     make(map[*Value]*Value),
		commonArgs:      make(map[selKey]*Value),
		commonSelectors: make(map[selKey]*Value),
		memForCall:      make(map[ID]*Value),
	}

	// When a 64-bit integer must be split into two halves, record which
	// half op (and with what type) comes first in memory order for this
	// target's endianness.
	if f.Config.BigEndian {
		x.firstOp = OpInt64Hi
		x.secondOp = OpInt64Lo
		x.firstType = x.typs.Int32
		x.secondType = x.typs.UInt32
	} else {
		x.firstOp = OpInt64Lo
		x.secondOp = OpInt64Hi
		x.firstType = x.typs.UInt32
		x.secondType = x.typs.Int32
	}

	// Values that need rewriting, collected in one pass below.
	var selects []*Value
	var calls []*Value
	var args []*Value
	var exitBlocks []*Block

	var m0 *Value // the function's initial memory (OpInitMem)

	// Classify all values, doing the cheap rewrites (OpSelectNAddr, the
	// memory projection of each call) immediately.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpInitMem:
				m0 = v

			case OpClosureLECall, OpInterLECall, OpStaticLECall, OpTailLECall:
				calls = append(calls, v)

			case OpArg:
				args = append(args, v)

			case OpStore:
				if a := v.Args[1]; a.Op == OpSelectN && !CanSSA(a.Type) {
					// A "wide" SelectN (a result too large to SSA) must
					// feed exactly one store; remember that store so the
					// pair can be rewritten together later.
					if a.Uses > 1 {
						panic(fmt.Errorf("Saw double use of wide SelectN %s operand of Store %s",
							a.LongString(), v.LongString()))
					}
					x.wideSelects[a] = v
				}

			case OpSelectN:
				if v.Type == types.TypeMem {
					// Rewrite the call's memory projection in place;
					// there may be at most one per call.
					call := v.Args[0]
					aux := call.Aux.(*AuxCall)
					mem := x.memForCall[call.ID]
					if mem == nil {
						v.AuxInt = int64(aux.abiInfo.OutRegistersUsed())
						x.memForCall[call.ID] = v
					} else {
						panic(fmt.Errorf("Saw two memories for call %v, %v and %v", call, mem, v))
					}
				} else {
					selects = append(selects, v)
				}

			case OpSelectNAddr:
				// The address of a stack result is SP plus the result's
				// frame offset; rewrite it now.
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)
				pt := v.Type
				off := x.offsetFrom(x.f.Entry, x.sp, aux.OffsetOfResult(which), pt)
				v.copyOf(off)
			}
		}

		// Multi-value return blocks are rewritten last, after the
		// selectors and args feeding them have been processed.
		if isBlockMultiValueExit(b) {
			exitBlocks = append(exitBlocks, b)
		}
	}

	// Rewrite the incoming parameters: each OpArg is recomposed from
	// register-arg ops or from narrower OpArgs/loads at its frame slot.
	for _, v := range args {
		var rc registerCursor
		a := x.prAssignForArg(v)
		aux := x.f.OwnAux
		regs := a.Registers
		var offset int64
		if len(regs) == 0 {
			offset = a.FrameOffset(aux.abiInfo)
		}
		auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(f.Entry.Pos, f.Entry, v, v, m0, v.Type, rc)
	}

	// Rewrite selectors of call results.
	for _, v := range selects {
		if v.Op == OpInvalid { // already rewritten away
			continue
		}

		call := v.Args[0]
		aux := call.Aux.(*AuxCall)
		mem := x.memForCall[call.ID]
		if mem == nil {
			// Ensure the call has a memory projection to thread through.
			mem = call.Block.NewValue1I(call.Pos, OpSelectN, types.TypeMem, int64(aux.abiInfo.OutRegistersUsed()), call)
			x.memForCall[call.ID] = mem
		}

		i := v.AuxInt
		regs := aux.RegsOfResult(i)

		// If this select feeds a single store of a wide (non-SSA-able)
		// value, rewrite the select+store pair together.
		if store := x.wideSelects[v]; store != nil {
			// Use the memory coming into the store.
			storeAddr := store.Args[0]
			mem := store.Args[2]
			if len(regs) > 0 {
				// Register pieces are stored one by one to the store's
				// destination.
				var rc registerCursor
				rc.init(regs, aux.abiInfo, nil, storeAddr, 0)
				mem = x.rewriteWideSelectToStores(call.Pos, call.Block, v, mem, v.Type, rc)
				store.copyOf(mem)
			} else {
				// A memory result is block-copied from its slot on the
				// stack to the store's destination.
				offset := aux.OffsetOfResult(i)
				auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))

				move := store.Block.NewValue3A(store.Pos, OpMove, types.TypeMem, v.Type, storeAddr, auxBase, mem)
				move.AuxInt = v.Type.Size()
				store.copyOf(move)
			}
			continue
		}

		var auxBase *Value
		if len(regs) == 0 {
			offset := aux.OffsetOfResult(i)
			auxBase = x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		}
		var rc registerCursor
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(call.Pos, call.Block, v, v, mem, v.Type, rc)
	}

	// rewriteCall expands one LE call's arguments, switches it to its
	// final op, and retypes it to the out-registers plus memory.
	rewriteCall := func(v *Value, newOp Op, argStart int) {
		x.rewriteCallArgs(v, argStart)
		v.Op = newOp
		rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
		v.Type = types.NewResults(append(rts, types.TypeMem))
	}

	// argStart counts the leading operands kept unexpanded (e.g. the
	// closure or itab operands of closure/interface calls).
	for _, v := range calls {
		switch v.Op {
		case OpStaticLECall:
			rewriteCall(v, OpStaticCall, 0)
		case OpTailLECall:
			rewriteCall(v, OpTailCall, 0)
		case OpClosureLECall:
			rewriteCall(v, OpClosureCall, 2)
		case OpInterLECall:
			rewriteCall(v, OpInterCall, 1)
		}
	}

	// Rewrite the multi-value returns.
	for _, b := range exitBlocks {
		v := b.Controls[0] // the OpMakeResult
		x.rewriteFuncResults(v, b, f.OwnAux)
		b.SetControl(v)
	}

}
217
// rewriteFuncResults rewrites v, the OpMakeResult controlling exit
// block b, so that its operands become the function's results
// decomposed into their ABI register pieces (collected in allResults)
// followed by the final memory; results assigned to memory are stored
// to their named result slots instead of appearing as operands.
func (x *expandState) rewriteFuncResults(v *Value, b *Block, aux *AuxCall) {

	m0 := v.MemoryArg()
	mem := m0

	allResults := []*Value{}
	var oldArgs []*Value
	argsWithoutMem := v.Args[:len(v.Args)-1] // the last operand is memory

	for j, a := range argsWithoutMem {
		oldArgs = append(oldArgs, a)
		i := int64(j)
		auxType := aux.TypeOfResult(i)
		// Address of the named result slot, used when the result (or
		// part of it) goes to memory.
		auxBase := b.NewValue2A(v.Pos, OpLocalAddr, types.NewPtr(auxType), aux.NameOfResult(i), x.sp, mem)
		auxOffset := int64(0)
		aRegs := aux.RegsOfResult(int64(j))
		if a.Op == OpDereference {
			a.Op = OpLoad // for expansion purposes a Dereference is just a Load
		}
		var rc registerCursor
		var result *[]*Value
		if len(aRegs) > 0 {
			result = &allResults
		} else {
			// A memory result that is already a load from its own
			// result slot would be a no-op store; skip it.
			if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr && a.Args[0].Aux == aux.NameOfResult(i) {
				continue
			}
		}
		rc.init(aRegs, aux.abiInfo, result, auxBase, auxOffset)
		mem = x.decomposeAsNecessary(v.Pos, b, a, mem, rc)
	}
	// Replace the operands with register pieces plus memory, and drop
	// any operands that became unused.
	v.resetArgs()
	v.AddArgs(allResults...)
	v.AddArg(mem)
	for _, a := range oldArgs {
		if a.Uses == 0 {
			if x.debug > 1 {
				x.Printf("...marking %v unused\n", a.LongString())
			}
			x.invalidateRecursively(a)
		}
	}
	v.Type = types.NewResults(append(abi.RegisterTypes(aux.abiInfo.OutParams()), types.TypeMem))
	return
}
267
// rewriteCallArgs expands the arguments of call v starting at operand
// firstArg (earlier operands, such as a closure or itab, are
// preserved). Register-assigned arguments are decomposed into pieces
// appended as new operands; memory-assigned arguments become stores
// relative to SP (the caller's SP for tail calls), threaded through
// the call's memory operand.
func (x *expandState) rewriteCallArgs(v *Value, firstArg int) {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
		x.Printf("rewriteCallArgs(%s; %d)\n", v.LongString(), firstArg)
	}

	aux := v.Aux.(*AuxCall)
	m0 := v.MemoryArg()
	mem := m0
	allResults := []*Value{}
	oldArgs := []*Value{}
	argsWithoutMem := v.Args[firstArg : len(v.Args)-1]

	sp := x.sp
	if v.Op == OpTailLECall {
		// Tail-call stack arguments are addressed relative to the
		// caller's SP, not this frame's SP.
		sp = v.Block.NewValue1(src.NoXPos, OpGetCallerSP, x.typs.Uintptr, mem)
	}

	for i, a := range argsWithoutMem {
		oldArgs = append(oldArgs, a)
		auxI := int64(i)
		aRegs := aux.RegsOfArg(auxI)
		aType := aux.TypeOfArg(auxI)

		if a.Op == OpDereference {
			a.Op = OpLoad // for expansion purposes a Dereference is just a Load
		}
		var rc registerCursor
		var result *[]*Value
		var aOffset int64
		if len(aRegs) > 0 {
			result = &allResults
		} else {
			aOffset = aux.OffsetOfArg(auxI)
		}
		if v.Op == OpTailLECall && a.Op == OpArg && a.AuxInt == 0 {
			// An incoming parameter forwarded unchanged to the tail
			// call at the same frame offset needs no copy.
			n := a.Aux.(*ir.Name)
			if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.Arch.FixedFrameSize == aOffset {
				continue
			}
		}
		if x.debug > 1 {
			x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
		}

		rc.init(aRegs, aux.abiInfo, result, sp, aOffset)
		mem = x.decomposeAsNecessary(v.Pos, v.Block, a, mem, rc)
	}
	// Rebuild the operand list: preserved prefix, then register
	// pieces, then the final memory; invalidate dead old operands.
	var preArgStore [2]*Value
	preArgs := append(preArgStore[:0], v.Args[0:firstArg]...)
	v.resetArgs()
	v.AddArgs(preArgs...)
	v.AddArgs(allResults...)
	v.AddArg(mem)
	for _, a := range oldArgs {
		if a.Uses == 0 {
			x.invalidateRecursively(a)
		}
	}

	return
}
335
336 func (x *expandState) decomposePair(pos src.XPos, b *Block, a, mem *Value, t0, t1 *types.Type, o0, o1 Op, rc *registerCursor) *Value {
337 e := b.NewValue1(pos, o0, t0, a)
338 pos = pos.WithNotStmt()
339 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
340 e = b.NewValue1(pos, o1, t1, a)
341 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t1))
342 return mem
343 }
344
345 func (x *expandState) decomposeOne(pos src.XPos, b *Block, a, mem *Value, t0 *types.Type, o0 Op, rc *registerCursor) *Value {
346 e := b.NewValue1(pos, o0, t0, a)
347 pos = pos.WithNotStmt()
348 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
349 return mem
350 }
351
352
353
354
355
356
357
358
359
// decomposeAsNecessary decomposes a value a (which may be an
// aggregate) into the appropriate number of register-sized pieces,
// adding each piece to the register cursor's result list, or storing
// it at the cursor's destination+offset when no registers are
// assigned. m0 is the incoming memory; the (possibly updated) memory
// is returned.
func (x *expandState) decomposeAsNecessary(pos src.XPos, b *Block, a, m0 *Value, rc registerCursor) *Value {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
	}
	at := a.Type
	if at.Size() == 0 {
		return m0 // zero-sized values contribute nothing
	}
	if a.Op == OpDereference {
		a.Op = OpLoad // for expansion purposes a Dereference is just a Load
	}

	// A value that can't be SSA'd and is headed to memory must be a
	// Load; it is moved with a single block copy instead of piecewise.
	if !rc.hasRegs() && !CanSSA(at) {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		if a.Op == OpLoad {
			m0 = b.NewValue3A(pos, OpMove, types.TypeMem, at, dst, a.Args[0], m0)
			m0.AuxInt = at.Size()
			return m0
		} else {
			panic(fmt.Errorf("Store of not a load"))
		}
	}

	mem := m0
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := b.NewValue1I(pos, OpArraySelect, et, i, a)
			pos = pos.WithNotStmt()
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSTRUCT:
		// SIMD struct values are handled whole by the tail below.
		if at.IsSIMD() {
			break
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := b.NewValue1I(pos, OpStructSelect, et, int64(i), a)
			pos = pos.WithNotStmt()
			if x.debug > 1 {
				x.Printf("...recur decompose %s, %v\n", e.LongString(), et)
			}
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSLICE:
		mem = x.decomposeOne(pos, b, a, mem, at.Elem().PtrTo(), OpSlicePtr, &rc)
		pos = pos.WithNotStmt()
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceLen, &rc)
		return x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceCap, &rc)

	case types.TSTRING:
		return x.decomposePair(pos, b, a, mem, x.typs.BytePtr, x.typs.Int, OpStringPtr, OpStringLen, &rc)

	case types.TINTER:
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Uintptr, OpITab, &rc)
		pos = pos.WithNotStmt()
		// If the data word was built by IMake, dig through wrapper
		// makes to the nonzero-sized value being wrapped.
		if a.Op == OpIMake {
			data := a.Args[1]
			for data.Op == OpStructMake || data.Op == OpArrayMake1 {
				for _, a := range data.Args {
					if a.Type.Size() > 0 {
						data = a
						break
					}
				}
			}
			return x.decomposeAsNecessary(pos, b, data, mem, rc.next(data.Type))
		}
		return x.decomposeOne(pos, b, a, mem, x.typs.BytePtr, OpIData, &rc)

	case types.TCOMPLEX64:
		return x.decomposePair(pos, b, a, mem, x.typs.Float32, x.typs.Float32, OpComplexReal, OpComplexImag, &rc)

	case types.TCOMPLEX128:
		return x.decomposePair(pos, b, a, mem, x.typs.Float64, x.typs.Float64, OpComplexReal, OpComplexImag, &rc)

	case types.TINT64:
		// A 64-bit int wider than a register is split into the two
		// halves recorded for this target's endianness.
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.firstType, x.secondType, x.firstOp, x.secondOp, &rc)
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.typs.UInt32, x.typs.UInt32, x.firstOp, x.secondOp, &rc)
		}
	}

	// Not further decomposable: assign to the next register, or store
	// at the cursor's current offset.
	if rc.hasRegs() {
		if x.debug > 1 {
			x.Printf("...recur addArg %s\n", a.LongString())
		}
		rc.addArg(a)
	} else {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		mem = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, mem)
	}

	return mem
}
475
476
477
478
479
480
481
482
483
484
485
486
487
488
// rewriteSelectOrArg rewrites a selector (OpSelectN) or parameter
// (OpArg) into its expanded form, recursively composing aggregate
// types from their pieces. container is the original OpArg/OpSelectN
// being expanded; a is the value to rewrite in place, or nil to create
// a fresh value for an inner component; m0 is the memory used by any
// loads; at is the type of the piece; rc tracks the registers or the
// stack offset the piece lives at. The resulting value is returned and
// memoized by (container, offset, type) so common pieces are shared.
func (x *expandState) rewriteSelectOrArg(pos src.XPos, b *Block, container, a, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at == types.TypeMem {
		a.copyOf(m0)
		return a
	}

	// makeOf creates a fresh value (when a is nil) or repurposes a in
	// place to the given op, type, and args.
	makeOf := func(a *Value, op Op, args []*Value) *Value {
		if a == nil {
			a = b.NewValue0(pos, op, at)
			a.AddArgs(args...)
		} else {
			a.resetArgs()
			a.Aux, a.AuxInt = nil, 0
			a.Pos, a.Op, a.Type = pos, op, at
			a.AddArgs(args...)
		}
		return a
	}

	if at.Size() == 0 {
		// Zero-sized aggregates still get a (trivial) make op.
		if at.IsArray() {
			return makeOf(a, OpArrayMake0, nil)
		}
		if at.IsStruct() {
			return makeOf(a, OpStructMake, nil)
		}
		return a
	}

	sk := selKey{from: container, size: 0, offsetOrIndex: rc.storeOffset, typ: at}
	dupe := x.commonSelectors[sk]
	if dupe != nil {
		// Reuse a previously built piece with the same key.
		if a == nil {
			return dupe
		}
		a.copyOf(dupe)
		return a
	}

	var argStore [10]*Value
	args := argStore[:0]

	// addArg collects component values for an aggregate make op.
	addArg := func(a0 *Value) {
		if a0 == nil {
			as := "<nil>"
			if a != nil {
				as = a.LongString()
			}
			panic(fmt.Errorf("a0 should not be nil, a=%v, container=%v, at=%v", as, container.LongString(), at))
		}
		args = append(args, a0)
	}

	// Aggregates: recursively build each component, then combine them
	// with the appropriate make op, memoizing the result.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			addArg(e)
		}
		a = makeOf(a, OpArrayMake1, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRUCT:
		// SIMD struct values are handled whole, below.
		if at.IsSIMD() {
			break
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			if e == nil {
				panic(fmt.Errorf("nil e, et=%v, et.Size()=%d, i=%d", et, et.Size(), i))
			}
			addArg(e)
			pos = pos.WithNotStmt()
		}
		if at.NumFields() > 4 {
			panic(fmt.Errorf("Too many fields (%d, %d bytes), container=%s", at.NumFields(), at.Size(), container.LongString()))
		}
		a = makeOf(a, OpStructMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSLICE:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpSliceMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRING:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpStringMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINTER:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		a = makeOf(a, OpIMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX64:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX128:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINT64:
		// Wide 64-bit ints: build the two halves (in target memory
		// order) and swap them into Int64Make's (hi, lo) operand order
		// on little-endian targets.
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.firstType, rc.next(x.firstType)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.secondType, rc.next(x.secondType)))
			if !x.f.Config.BigEndian {
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			if !x.f.Config.BigEndian {
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	}

	// Leaf piece: it comes directly from the container.
	if container.Op == OpArg {
		if rc.hasRegs() {
			// A register-passed piece becomes an OpArgIntReg/OpArgFloatReg.
			op, i := rc.ArgOpAndRegisterFor()
			name := container.Aux.(*ir.Name)
			a = makeOf(a, op, nil)
			a.AuxInt = i
			a.Aux = &AuxNameOffset{name, rc.storeOffset}
		} else {
			// A stack-passed piece becomes a narrower OpArg at the
			// piece's offset, deduplicated through commonArgs.
			key := selKey{container, rc.storeOffset, at.Size(), at}
			w := x.commonArgs[key]
			if w != nil && w.Uses != 0 { // do not reuse a dead value
				if a == nil {
					a = w
				} else {
					a.copyOf(w)
				}
			} else {
				if a == nil {
					aux := container.Aux
					auxInt := container.AuxInt + rc.storeOffset
					a = container.Block.NewValue0IA(container.Pos, OpArg, at, auxInt, aux)
				} else {
					// do nothing; the original value is already the right OpArg
				}
				x.commonArgs[key] = a
			}
		}
	} else if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if at == types.TypeMem {
			// Defensive: memory was handled at the top of the function.
			if a != m0 || a != x.memForCall[call.ID] {
				panic(fmt.Errorf("Memories %s, %s, and %s should all be equal after %s", a.LongString(), m0.LongString(), x.memForCall[call.ID], call.LongString()))
			}
		} else if rc.hasRegs() {
			// Flat register index = registers used by earlier results
			// plus the cursor's position within this result.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a = makeOf(a, OpSelectN, []*Value{call})
			a.AuxInt = reg
		} else {
			// A stack result piece becomes a load from its slot.
			off := x.offsetFrom(x.f.Entry, x.sp, rc.storeOffset+aux.OffsetOfResult(which), types.NewPtr(at))
			a = makeOf(a, OpLoad, []*Value{off, m0})
		}

	} else {
		panic(fmt.Errorf("Expected container OpArg or OpSelectN, saw %v instead", container.LongString()))
	}

	x.commonSelectors[sk] = a
	return a
}
705
706
707
708
709
// rewriteWideSelectToStores rewrites the register-assigned pieces of a
// wide (too-large-to-SSA) SelectN result directly into stores: it
// recurses over type at, and for each scalar piece emits an OpSelectN
// of the piece's flat register index followed by an OpStore of that
// piece at the cursor's destination+offset. m0 is the incoming memory;
// the final memory is returned.
func (x *expandState) rewriteWideSelectToStores(pos src.XPos, b *Block, container, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at.Size() == 0 {
		return m0
	}

	// Aggregates: recurse piecewise; the cursor advances per component.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
		}
		return m0

	case types.TSTRUCT:
		// SIMD struct values are stored whole, below.
		if at.IsSIMD() {
			break
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
			pos = pos.WithNotStmt()
		}
		return m0

	case types.TSLICE:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TSTRING:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TINTER:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		return m0

	case types.TCOMPLEX64:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		return m0

	case types.TCOMPLEX128:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		return m0

	case types.TINT64:
		// Wide 64-bit ints: two halves in target memory order.
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.firstType, rc.next(x.firstType))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.secondType, rc.next(x.secondType))
			return m0
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			return m0
		}
	}

	// Scalar piece: select the proper register from the call, store it.
	if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if rc.hasRegs() {
			// Flat register index = registers used by earlier results
			// plus the cursor's position within this result.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a := b.NewValue1I(pos, OpSelectN, at, reg, call)
			dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
			m0 = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, m0)
		} else {
			panic(fmt.Errorf("Expected rc to have registers"))
		}
	} else {
		panic(fmt.Errorf("Expected container OpSelectN, saw %v instead", container.LongString()))
	}
	return m0
}
806
807 func isBlockMultiValueExit(b *Block) bool {
808 return (b.Kind == BlockRet || b.Kind == BlockRetJmp) && b.Controls[0] != nil && b.Controls[0].Op == OpMakeResult
809 }
810
// Abi1RO indexes a register-sized piece within a parameter's assigned
// registers (it is used to index registerCursor.regs and is advanced
// by the per-type register count).
type Abi1RO uint8

// registerCursor tracks the progress of decomposing one parameter or
// result: which registers remain, and where in memory (storeDest plus
// storeOffset) pieces go when there are no registers.
type registerCursor struct {
	storeDest   *Value         // base address for stores when the value is memory-assigned
	storeOffset int64          // current offset from storeDest for the next piece
	regs        []abi.RegIndex // registers assigned to this parameter/result
	nextSlice   Abi1RO         // index of the next register-sized piece within regs
	config      *abi.ABIConfig // ABI configuration (register counts per type)
	regValues   *[]*Value      // register-assigned pieces accumulate here
}
822
823 func (c *registerCursor) String() string {
824 dest := "<none>"
825 if c.storeDest != nil {
826 dest = fmt.Sprintf("%s+%d", c.storeDest.String(), c.storeOffset)
827 }
828 regs := "<none>"
829 if c.regValues != nil {
830 regs = ""
831 for i, x := range *c.regValues {
832 if i > 0 {
833 regs = regs + "; "
834 }
835 regs = regs + x.LongString()
836 }
837 }
838
839
840 return fmt.Sprintf("RCSR{storeDest=%v, regsLen=%d, nextSlice=%d, regValues=[%s]}", dest, len(c.regs), c.nextSlice, regs)
841 }
842
843
844
845 func (c *registerCursor) next(t *types.Type) registerCursor {
846 c.storeOffset = types.RoundUp(c.storeOffset, t.Alignment())
847 rc := *c
848 c.storeOffset = types.RoundUp(c.storeOffset+t.Size(), t.Alignment())
849 if int(c.nextSlice) < len(c.regs) {
850 w := c.config.NumParamRegs(t)
851 c.nextSlice += Abi1RO(w)
852 }
853 return rc
854 }
855
856
857 func (c *registerCursor) plus(regWidth Abi1RO) registerCursor {
858 rc := *c
859 rc.nextSlice += regWidth
860 return rc
861 }
862
863 func (c *registerCursor) init(regs []abi.RegIndex, info *abi.ABIParamResultInfo, result *[]*Value, storeDest *Value, storeOffset int64) {
864 c.regs = regs
865 c.nextSlice = 0
866 c.storeOffset = storeOffset
867 c.storeDest = storeDest
868 c.config = info.Config()
869 c.regValues = result
870 }
871
// addArg appends v to the cursor's accumulated register values.
func (c *registerCursor) addArg(v *Value) {
	*c.regValues = append(*c.regValues, v)
}
875
876 func (c *registerCursor) hasRegs() bool {
877 return len(c.regs) > 0
878 }
879
// ArgOpAndRegisterFor returns the arg op and register number for the
// cursor's current register (regs[nextSlice]).
func (c *registerCursor) ArgOpAndRegisterFor() (Op, int64) {
	r := c.regs[c.nextSlice]
	return ArgOpAndRegisterFor(r, c.config)
}
884
885
886
887 func ArgOpAndRegisterFor(r abi.RegIndex, abiConfig *abi.ABIConfig) (Op, int64) {
888 i := abiConfig.FloatIndexFor(r)
889 if i >= 0 {
890 return OpArgFloatReg, i
891 }
892 return OpArgIntReg, int64(r)
893 }
894
// selKey identifies one decomposed piece of a value, used to
// deduplicate common selectors and args (see commonSelectors and
// commonArgs in expandState).
type selKey struct {
	from          *Value // the value the piece is taken from
	offsetOrIndex int64  // byte offset (or index), whichever fits the selector
	size          int64  // piece size in bytes (zero for selector keys)
	typ           *types.Type
}
901
// expandState holds the shared state of the expand-calls pass.
type expandState struct {
	f       *Func
	debug   int    // >0 enables debug output; >1 enables verbose tracing
	regSize int64  // target register size in bytes (f.Config.RegSize)
	sp      *Value // the stack pointer value
	typs    *Types // cached commonly used types

	// Ops/types for the two halves of a split 64-bit integer, in
	// target memory order (set from f.Config.BigEndian in expandCalls).
	firstOp    Op
	secondOp   Op
	firstType  *types.Type
	secondType *types.Type

	wideSelects     map[*Value]*Value // wide (non-SSA-able) SelectN -> the single Store consuming it
	commonSelectors map[selKey]*Value // memoized decomposed selector pieces
	commonArgs      map[selKey]*Value // memoized decomposed OpArg pieces
	memForCall      map[ID]*Value     // memory projection (SelectN) for each call ID
	indentLevel     int               // amount to indent debug output
}
920
921
922 func (x *expandState) offsetFrom(b *Block, from *Value, offset int64, pt *types.Type) *Value {
923 ft := from.Type
924 if offset == 0 {
925 if ft == pt {
926 return from
927 }
928
929 if (ft.IsPtr() || ft.IsUnsafePtr()) && pt.IsPtr() {
930 return from
931 }
932 }
933
934 for from.Op == OpOffPtr {
935 offset += from.AuxInt
936 from = from.Args[0]
937 }
938 if from == x.sp {
939 return x.f.ConstOffPtrSP(pt, offset, x.sp)
940 }
941 return b.NewValue1I(from.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
942 }
943
944
945 func (x *expandState) prAssignForArg(v *Value) *abi.ABIParamAssignment {
946 if v.Op != OpArg {
947 panic(fmt.Errorf("Wanted OpArg, instead saw %s", v.LongString()))
948 }
949 return ParamAssignmentForArgName(x.f, v.Aux.(*ir.Name))
950 }
951
952
953 func ParamAssignmentForArgName(f *Func, name *ir.Name) *abi.ABIParamAssignment {
954 abiInfo := f.OwnAux.abiInfo
955 ip := abiInfo.InParams()
956 for i, a := range ip {
957 if a.Name == name {
958 return &ip[i]
959 }
960 }
961 panic(fmt.Errorf("Did not match param %v in prInfo %+v", name, abiInfo.InParams()))
962 }
963
964
// indent adjusts the debug-output indentation by n (negative n dedents).
func (x *expandState) indent(n int) {
	x.indentLevel += n
}
968
969
// Printf is like fmt.Printf, but the output is preceded by the current
// debug indentation (indentLevel spaces).
func (x *expandState) Printf(format string, a ...any) (n int, err error) {
	if x.indentLevel > 0 {
		fmt.Printf("%[1]*s", x.indentLevel, "") // emit indentLevel spaces
	}
	return fmt.Printf(format, a...)
}
976
977 func (x *expandState) invalidateRecursively(a *Value) {
978 var s string
979 if x.debug > 0 {
980 plus := " "
981 if a.Pos.IsStmt() == src.PosIsStmt {
982 plus = " +"
983 }
984 s = a.String() + plus + a.Pos.LineNumber() + " " + a.LongString()
985 if x.debug > 1 {
986 x.Printf("...marking %v unused\n", s)
987 }
988 }
989 lost := a.invalidateRecursively()
990 if x.debug&1 != 0 && lost {
991 x.Printf("Lost statement marker in %s on former %s\n", base.Ctxt.Pkgpath+"."+x.f.Name, s)
992 }
993 }
994