// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package arm64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
)

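// loadByType returns the load instruction of the given type.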
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return arm64.AMOVB
			} else {
				return arm64.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return arm64.AMOVH
			} else {
				return arm64.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return arm64.AMOVW
			} else {
				return arm64.AMOVWU
			}
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad load type")
}

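// storeByType returns the store instruction of the given type.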
func storeByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return arm64.AMOVB
		case 2:
			return arm64.AMOVH
		case 4:
			return arm64.AMOVW
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad store type")
}

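// loadByType2 returns an opcode that can load consecutive memory locations
// into 2 registers with type t. It returns obj.AXXX if no such opcode exists.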
func loadByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFLDPS
		case 8:
			return arm64.AFLDPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ALDPW
		case 8:
			return arm64.ALDP
		}
	}
	return obj.AXXX
}

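// storeByType2 returns an opcode that can store registers with type t into
// 2 consecutive memory locations. It returns obj.AXXX if no such opcode exists.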
func storeByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFSTPS
		case 8:
			return arm64.AFSTPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ASTPW
		case 8:
			return arm64.ASTP
		}
	}
	return obj.AXXX
}

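// makeshift encodes a register shifted by a constant, used as an Offset in Prog.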
func makeshift(v *ssa.Value, reg int16, typ int64, s int64) int64 {
	if s < 0 || s >= 64 {
		v.Fatalf("shift out of range: %d", s)
	}
	return int64(reg&31)<<16 | typ | (s&63)<<10
}

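// genshift generates a Prog for r = r0 op (r1 shifted by n).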
func genshift(s *ssagen.State, v *ssa.Value, as obj.As, r0, r1, r int16, typ int64, n int64) *obj.Prog {
	p := s.Prog(as)
	p.From.Type = obj.TYPE_SHIFT
	p.From.Offset = makeshift(v, r1, typ, n)
	p.Reg = r0
	if r != 0 {
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	}
	return p
}

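// genIndexedOperand generates the memory operand for the indexed load/store
// instructions; base and idx are registers.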
func genIndexedOperand(op ssa.Op, base, idx int16) obj.Addr {
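	// Reg: base register, Index: (shifted) index register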
	mop := obj.Addr{Type: obj.TYPE_MEM, Reg: base}
	switch op {
	case ssa.OpARM64MOVDloadidx8, ssa.OpARM64MOVDstoreidx8, ssa.OpARM64MOVDstorezeroidx8,
		ssa.OpARM64FMOVDloadidx8, ssa.OpARM64FMOVDstoreidx8:
		mop.Index = arm64.REG_LSL | 3<<5 | idx&31
	case ssa.OpARM64MOVWloadidx4, ssa.OpARM64MOVWUloadidx4, ssa.OpARM64MOVWstoreidx4, ssa.OpARM64MOVWstorezeroidx4,
		ssa.OpARM64FMOVSloadidx4, ssa.OpARM64FMOVSstoreidx4:
		mop.Index = arm64.REG_LSL | 2<<5 | idx&31
	case ssa.OpARM64MOVHloadidx2, ssa.OpARM64MOVHUloadidx2, ssa.OpARM64MOVHstoreidx2, ssa.OpARM64MOVHstorezeroidx2:
		mop.Index = arm64.REG_LSL | 1<<5 | idx&31
	default:
		mop.Index = idx
	}
	return mop
}

func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpARM64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := arm64.AMOVD
		if v.Type.IsFloat() {
			switch v.Type.Size() {
			case 4:
				as = arm64.AFMOVS
			case 8:
				as = arm64.AFMOVD
			default:
				panic("bad float size")
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpARM64MOVDnop:
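		// nothing to do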
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		ssagen.CheckArgReg(v)
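		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
		// The loop only runs once.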
		args := v.Block.Func.RegArgs
		if len(args) == 0 {
			break
		}
		v.Block.Func.RegArgs = nil // prevent from running again

		for i := 0; i < len(args); i++ {
			a := args[i]
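			// Offset by the size of the saved LR slot.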
			addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.Arch.FixedFrameSize)
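			// Look for double-register operations if we can.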
			if i < len(args)-1 {
				b := args[i+1]
				if a.Type.Size() == b.Type.Size() &&
					a.Type.IsFloat() == b.Type.IsFloat() &&
					b.Offset == a.Offset+a.Type.Size() {
					ld := loadByType2(a.Type)
					st := storeByType2(a.Type)
					if ld != obj.AXXX && st != obj.AXXX {
						s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Reg2: b.Reg, Addr: addr, Unspill: ld, Spill: st})
						i++ // b is done also, skip it.
						continue
					}
				}
			}
			s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}

	case ssa.OpARM64ADD,
		ssa.OpARM64SUB,
		ssa.OpARM64AND,
		ssa.OpARM64OR,
		ssa.OpARM64XOR,
		ssa.OpARM64BIC,
		ssa.OpARM64EON,
		ssa.OpARM64ORN,
		ssa.OpARM64MUL,
		ssa.OpARM64MULW,
		ssa.OpARM64MNEG,
		ssa.OpARM64MNEGW,
		ssa.OpARM64MULH,
		ssa.OpARM64UMULH,
		ssa.OpARM64MULL,
		ssa.OpARM64UMULL,
		ssa.OpARM64DIV,
		ssa.OpARM64UDIV,
		ssa.OpARM64DIVW,
		ssa.OpARM64UDIVW,
		ssa.OpARM64MOD,
		ssa.OpARM64UMOD,
		ssa.OpARM64MODW,
		ssa.OpARM64UMODW,
		ssa.OpARM64SLL,
		ssa.OpARM64SRL,
		ssa.OpARM64SRA,
		ssa.OpARM64FADDS,
		ssa.OpARM64FADDD,
		ssa.OpARM64FSUBS,
		ssa.OpARM64FSUBD,
		ssa.OpARM64FMULS,
		ssa.OpARM64FMULD,
		ssa.OpARM64FNMULS,
		ssa.OpARM64FNMULD,
		ssa.OpARM64FDIVS,
		ssa.OpARM64FDIVD,
		ssa.OpARM64FMINS,
		ssa.OpARM64FMIND,
		ssa.OpARM64FMAXS,
		ssa.OpARM64FMAXD,
		ssa.OpARM64ROR,
		ssa.OpARM64RORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64FMADDS,
		ssa.OpARM64FMADDD,
		ssa.OpARM64FNMADDS,
		ssa.OpARM64FNMADDD,
		ssa.OpARM64FMSUBS,
		ssa.OpARM64FMSUBD,
		ssa.OpARM64FNMSUBS,
		ssa.OpARM64FNMSUBD,
		ssa.OpARM64MADD,
		ssa.OpARM64MADDW,
		ssa.OpARM64MSUB,
		ssa.OpARM64MSUBW:
		rt := v.Reg()
		ra := v.Args[0].Reg()
		rm := v.Args[1].Reg()
		rn := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.Reg = ra
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rm
		p.AddRestSourceReg(rn)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rt
	case ssa.OpARM64ADDconst,
		ssa.OpARM64SUBconst,
		ssa.OpARM64ANDconst,
		ssa.OpARM64ORconst,
		ssa.OpARM64XORconst,
		ssa.OpARM64SLLconst,
		ssa.OpARM64SRLconst,
		ssa.OpARM64SRAconst,
		ssa.OpARM64RORconst,
		ssa.OpARM64RORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADDSconstflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64ADCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADCSflags,
		ssa.OpARM64ADDSflags,
		ssa.OpARM64SBCSflags,
		ssa.OpARM64SUBSflags:
		r := v.Reg0()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64NEGSflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64NGCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64EXTRconst,
		ssa.OpARM64EXTRWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.AddRestSourceReg(v.Args[0].Reg())
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MVNshiftLL, ssa.OpARM64NEGshiftLL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64MVNshiftRL, ssa.OpARM64NEGshiftRL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MVNshiftRO:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64ADDshiftLL,
		ssa.OpARM64SUBshiftLL,
		ssa.OpARM64ANDshiftLL,
		ssa.OpARM64ORshiftLL,
		ssa.OpARM64XORshiftLL,
		ssa.OpARM64EONshiftLL,
		ssa.OpARM64ORNshiftLL,
		ssa.OpARM64BICshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64ADDshiftRL,
		ssa.OpARM64SUBshiftRL,
		ssa.OpARM64ANDshiftRL,
		ssa.OpARM64ORshiftRL,
		ssa.OpARM64XORshiftRL,
		ssa.OpARM64EONshiftRL,
		ssa.OpARM64ORNshiftRL,
		ssa.OpARM64BICshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64ADDshiftRA,
		ssa.OpARM64SUBshiftRA,
		ssa.OpARM64ANDshiftRA,
		ssa.OpARM64ORshiftRA,
		ssa.OpARM64XORshiftRA,
		ssa.OpARM64EONshiftRA,
		ssa.OpARM64ORNshiftRA,
		ssa.OpARM64BICshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64ANDshiftRO,
		ssa.OpARM64ORshiftRO,
		ssa.OpARM64XORshiftRO,
		ssa.OpARM64EONshiftRO,
		ssa.OpARM64ORNshiftRO,
		ssa.OpARM64BICshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FMOVSconst,
		ssa.OpARM64FMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FCMPS0,
		ssa.OpARM64FCMPD0:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(0)
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMP,
		ssa.OpARM64CMPW,
		ssa.OpARM64CMN,
		ssa.OpARM64CMNW,
		ssa.OpARM64TST,
		ssa.OpARM64TSTW,
		ssa.OpARM64FCMPS,
		ssa.OpARM64FCMPD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPconst,
		ssa.OpARM64CMPWconst,
		ssa.OpARM64CMNconst,
		ssa.OpARM64CMNWconst,
		ssa.OpARM64TSTconst,
		ssa.OpARM64TSTWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPshiftLL, ssa.OpARM64CMNshiftLL, ssa.OpARM64TSTshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64CMPshiftRL, ssa.OpARM64CMNshiftRL, ssa.OpARM64TSTshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64TSTshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDaddr:
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
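		// MOVD	$sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP
		//               when constant is large, tmp register (REGTMP) may be used
		// - base is SB: load external address from constant pool (use relocation)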
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
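			// No sym, just MOVD $off(SP), R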
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpARM64MOVBload,
		ssa.OpARM64MOVBUload,
		ssa.OpARM64MOVHload,
		ssa.OpARM64MOVHUload,
		ssa.OpARM64MOVWload,
		ssa.OpARM64MOVWUload,
		ssa.OpARM64MOVDload,
		ssa.OpARM64FMOVSload,
		ssa.OpARM64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDP:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = v.Reg0()
		p.To.Offset = int64(v.Reg1())
	case ssa.OpARM64MOVBloadidx,
		ssa.OpARM64MOVBUloadidx,
		ssa.OpARM64MOVHloadidx,
		ssa.OpARM64MOVHUloadidx,
		ssa.OpARM64MOVWloadidx,
		ssa.OpARM64MOVWUloadidx,
		ssa.OpARM64MOVDloadidx,
		ssa.OpARM64FMOVSloadidx,
		ssa.OpARM64FMOVDloadidx,
		ssa.OpARM64MOVHloadidx2,
		ssa.OpARM64MOVHUloadidx2,
		ssa.OpARM64MOVWloadidx4,
		ssa.OpARM64MOVWUloadidx4,
		ssa.OpARM64MOVDloadidx8,
		ssa.OpARM64FMOVDloadidx8,
		ssa.OpARM64FMOVSloadidx4:
		p := s.Prog(v.Op.Asm())
		p.From = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDAR,
		ssa.OpARM64LDARB,
		ssa.OpARM64LDARW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64MOVBstore,
		ssa.OpARM64MOVHstore,
		ssa.OpARM64MOVWstore,
		ssa.OpARM64MOVDstore,
		ssa.OpARM64FMOVSstore,
		ssa.OpARM64FMOVDstore,
		ssa.OpARM64STLRB,
		ssa.OpARM64STLR,
		ssa.OpARM64STLRW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstoreidx,
		ssa.OpARM64MOVHstoreidx,
		ssa.OpARM64MOVWstoreidx,
		ssa.OpARM64MOVDstoreidx,
		ssa.OpARM64FMOVSstoreidx,
		ssa.OpARM64FMOVDstoreidx,
		ssa.OpARM64MOVHstoreidx2,
		ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSstoreidx4,
		ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDstoreidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
	case ssa.OpARM64STP:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = v.Args[1].Reg()
		p.From.Offset = int64(v.Args[2].Reg())
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezero,
		ssa.OpARM64MOVHstorezero,
		ssa.OpARM64MOVWstorezero,
		ssa.OpARM64MOVDstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx,
		ssa.OpARM64MOVWstorezeroidx,
		ssa.OpARM64MOVDstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx2,
		ssa.OpARM64MOVWstorezeroidx4,
		ssa.OpARM64MOVDstorezeroidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
	case ssa.OpARM64MOVQstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64BFI,
		ssa.OpARM64BFXIL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64SBFIZ,
		ssa.OpARM64SBFX,
		ssa.OpARM64UBFIZ,
		ssa.OpARM64UBFX:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredAtomicExchange64,
		ssa.OpARM64LoweredAtomicExchange32,
		ssa.OpARM64LoweredAtomicExchange8:
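		// LDAXR	(Rarg0), Rout
		// STLXR	Rarg1, (Rarg0), Rtmp
		// CBNZ		Rtmp, -2(PC)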
		var ld, st obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8:
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		case ssa.OpARM64LoweredAtomicExchange32:
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		case ssa.OpARM64LoweredAtomicExchange64:
			ld = arm64.ALDAXR
			st = arm64.ASTLXR
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(st)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = arm64.REGTMP
		p2 := s.Prog(arm64.ACBNZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p2.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicExchange64Variant,
		ssa.OpARM64LoweredAtomicExchange32Variant,
		ssa.OpARM64LoweredAtomicExchange8Variant:
		var swap obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8Variant:
			swap = arm64.ASWPALB
		case ssa.OpARM64LoweredAtomicExchange32Variant:
			swap = arm64.ASWPALW
		case ssa.OpARM64LoweredAtomicExchange64Variant:
			swap = arm64.ASWPALD
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
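		// SWPALD	Rarg1, (Rarg0), Rout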
		p := s.Prog(swap)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64LoweredAtomicAdd64,
		ssa.OpARM64LoweredAtomicAdd32:
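		// LDAXR	(Rarg0), Rout
		// ADD		Rarg1, Rout
		// STLXR	Rout, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)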
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAdd32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicAdd64Variant,
		ssa.OpARM64LoweredAtomicAdd32Variant:
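		// LDADDAL	Rarg1, (Rarg0), Rout
		// ADD		Rarg1, Rout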
		op := arm64.ALDADDALD
		if v.Op == ssa.OpARM64LoweredAtomicAdd32Variant {
			op = arm64.ALDADDALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
	case ssa.OpARM64LoweredAtomicCas64,
		ssa.OpARM64LoweredAtomicCas32:
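		// LDAXR	(Rarg0), Rtmp
		// CMP		Rarg1, Rtmp
		// BNE		3(PC)
		// STLXR	Rarg2, (Rarg0), Rtmp
		// CBNZ		Rtmp, -4(PC)
		// CSET		EQ, Rout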
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		cmp := arm64.ACMP
		if v.Op == ssa.OpARM64LoweredAtomicCas32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
			cmp = arm64.ACMPW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p1 := s.Prog(cmp)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.ABNE)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(st)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = arm64.REGTMP
		p4 := s.Prog(arm64.ACBNZ)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = arm64.REGTMP
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
		p5 := s.Prog(arm64.ACSET)
		p5.From.Type = obj.TYPE_SPECIAL
		p5.From.Offset = int64(arm64.SPOP_EQ)
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out
		p2.To.SetTarget(p5)
	case ssa.OpARM64LoweredAtomicCas64Variant,
		ssa.OpARM64LoweredAtomicCas32Variant:
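		// Rarg0: ptr
		// Rarg1: old
		// Rarg2: new
		// MOV		Rarg1, Rtmp
		// CASAL	Rtmp, (Rarg0), Rarg2
		// CMP		Rarg1, Rtmp
		// CSET		EQ, Rout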
		cas := arm64.ACASALD
		cmp := arm64.ACMP
		mov := arm64.AMOVD
		if v.Op == ssa.OpARM64LoweredAtomicCas32Variant {
			cas = arm64.ACASALW
			cmp = arm64.ACMPW
			mov = arm64.AMOVW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
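		// MOV	Rarg1, Rtmp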
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
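		// CASAL	Rtmp, (Rarg0), Rarg2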
		p1 := s.Prog(cas)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = r2
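		// CMP	Rarg1, Rtmp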
		p2 := s.Prog(cmp)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = arm64.REGTMP
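		// CSET	EQ, Rout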
		p3 := s.Prog(arm64.ACSET)
		p3.From.Type = obj.TYPE_SPECIAL
		p3.From.Offset = int64(arm64.SPOP_EQ)
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = out

	case ssa.OpARM64LoweredAtomicAnd64,
		ssa.OpARM64LoweredAtomicOr64,
		ssa.OpARM64LoweredAtomicAnd32,
		ssa.OpARM64LoweredAtomicOr32,
		ssa.OpARM64LoweredAtomicAnd8,
		ssa.OpARM64LoweredAtomicOr8:
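		// LDAXR	(Rarg0), Rout
		// AND/OR	Rarg1, Rout, tmp1
		// STLXR	tmp1, (Rarg0), Rtmp
		// CBNZ		Rtmp, -3(PC)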
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAnd32 || v.Op == ssa.OpARM64LoweredAtomicOr32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8 || v.Op == ssa.OpARM64LoweredAtomicOr8 {
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		tmp := v.RegTmp()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = out
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = tmp
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = tmp
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

	case ssa.OpARM64LoweredAtomicAnd8Variant,
		ssa.OpARM64LoweredAtomicAnd32Variant,
		ssa.OpARM64LoweredAtomicAnd64Variant:
		atomic_clear := arm64.ALDCLRALD
		if v.Op == ssa.OpARM64LoweredAtomicAnd32Variant {
			atomic_clear = arm64.ALDCLRALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8Variant {
			atomic_clear = arm64.ALDCLRALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
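		// AND is implemented as an atomic bit clear (LDCLRAL) of the
		// complemented operand:
		// MVN	Rarg1, Rtmp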
		p := s.Prog(arm64.AMVN)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
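		// LDCLRAL	Rtmp, (Rarg0), Rout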
		p1 := s.Prog(atomic_clear)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = out

	case ssa.OpARM64LoweredAtomicOr8Variant,
		ssa.OpARM64LoweredAtomicOr32Variant,
		ssa.OpARM64LoweredAtomicOr64Variant:
		atomic_or := arm64.ALDORALD
		if v.Op == ssa.OpARM64LoweredAtomicOr32Variant {
			atomic_or = arm64.ALDORALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicOr8Variant {
			atomic_or = arm64.ALDORALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
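		// LDORAL	Rarg1, (Rarg0), Rout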
		p := s.Prog(atomic_or)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64MOVBreg,
		ssa.OpARM64MOVBUreg,
		ssa.OpARM64MOVHreg,
		ssa.OpARM64MOVHUreg,
		ssa.OpARM64MOVWreg,
		ssa.OpARM64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpARM64MOVDreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpARM64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVWUreg && t.Size() == 4 && !t.IsSigned():
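				// arg is a proper-typed load, already zero/sign-extended, don't extend again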
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpARM64MVN,
		ssa.OpARM64NEG,
		ssa.OpARM64FABSD,
		ssa.OpARM64FMOVDfpgp,
		ssa.OpARM64FMOVDgpfp,
		ssa.OpARM64FMOVSfpgp,
		ssa.OpARM64FMOVSgpfp,
		ssa.OpARM64FNEGS,
		ssa.OpARM64FNEGD,
		ssa.OpARM64FSQRTS,
		ssa.OpARM64FSQRTD,
		ssa.OpARM64FCVTZSSW,
		ssa.OpARM64FCVTZSDW,
		ssa.OpARM64FCVTZUSW,
		ssa.OpARM64FCVTZUDW,
		ssa.OpARM64FCVTZSS,
		ssa.OpARM64FCVTZSD,
		ssa.OpARM64FCVTZUS,
		ssa.OpARM64FCVTZUD,
		ssa.OpARM64SCVTFWS,
		ssa.OpARM64SCVTFWD,
		ssa.OpARM64SCVTFS,
		ssa.OpARM64SCVTFD,
		ssa.OpARM64UCVTFWS,
		ssa.OpARM64UCVTFWD,
		ssa.OpARM64UCVTFS,
		ssa.OpARM64UCVTFD,
		ssa.OpARM64FCVTSD,
		ssa.OpARM64FCVTDS,
		ssa.OpARM64REV,
		ssa.OpARM64REVW,
		ssa.OpARM64REV16,
		ssa.OpARM64REV16W,
		ssa.OpARM64RBIT,
		ssa.OpARM64RBITW,
		ssa.OpARM64CLZ,
		ssa.OpARM64CLZW,
		ssa.OpARM64FRINTAD,
		ssa.OpARM64FRINTMD,
		ssa.OpARM64FRINTND,
		ssa.OpARM64FRINTPD,
		ssa.OpARM64FRINTZD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredRound32F, ssa.OpARM64LoweredRound64F:
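		// input is already rounded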
	case ssa.OpARM64VCNT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = (v.Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
	case ssa.OpARM64VUADDLV:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg() - arm64.REG_F0 + arm64.REG_V0
	case ssa.OpARM64CSEL, ssa.OpARM64CSEL0:
		r1 := int16(arm64.REGZERO)
		if v.Op != ssa.OpARM64CSEL0 {
			r1 = v.Args[1].Reg()
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(r1)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSINC, ssa.OpARM64CSINV, ssa.OpARM64CSNEG:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSETM:
		p := s.Prog(arm64.ACSETM)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DUFFZERO:
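		// runtime.duffzero expects start address in R20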
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredZero:
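		// STP.P	(ZR,ZR), 16(R16)
		// CMP	Rarg1, R16
		// BLE	-2(PC)
		// arg1 is the address of the last 16-byte unit to zero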
		p := s.Prog(arm64.ASTP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REG_R16
		p.To.Offset = 16
		p2 := s.Prog(arm64.ACMP)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = arm64.REG_R16
		p3 := s.Prog(arm64.ABLE)
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredMove:
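		// LDP.P	16(R16), (R25, Rtmp)
		// STP.P	(R25, Rtmp), 16(R17)
		// CMP	Rarg2, R16
		// BLE	-3(PC)
		// arg2 is the address of the last element of src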
		p := s.Prog(arm64.ALDP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = arm64.REG_R16
		p.From.Offset = 16
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = arm64.REG_R25
		p.To.Offset = int64(arm64.REGTMP)
		p2 := s.Prog(arm64.ASTP)
		p2.Scond = arm64.C_XPOST
		p2.From.Type = obj.TYPE_REGREG
		p2.From.Reg = arm64.REG_R25
		p2.From.Offset = int64(arm64.REGTMP)
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = arm64.REG_R17
		p2.To.Offset = 16
		p3 := s.Prog(arm64.ACMP)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.Reg = arm64.REG_R16
		p4 := s.Prog(arm64.ABLE)
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
	case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
		s.Call(v)
	case ssa.OpARM64CALLtail:
		s.TailCall(v)
	case ssa.OpARM64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
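		// AuxInt encodes how many buffer entries we need.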
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpARM64LoweredPanicBoundsA, ssa.OpARM64LoweredPanicBoundsB, ssa.OpARM64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpARM64LoweredNilCheck:
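		// Issue a load which will fault if arg is nil.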
		p := s.Prog(arm64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpARM64Equal,
		ssa.OpARM64NotEqual,
		ssa.OpARM64LessThan,
		ssa.OpARM64LessEqual,
		ssa.OpARM64GreaterThan,
		ssa.OpARM64GreaterEqual,
		ssa.OpARM64LessThanU,
		ssa.OpARM64LessEqualU,
		ssa.OpARM64GreaterThanU,
		ssa.OpARM64GreaterEqualU,
		ssa.OpARM64LessThanF,
		ssa.OpARM64LessEqualF,
		ssa.OpARM64GreaterThanF,
		ssa.OpARM64GreaterEqualF,
		ssa.OpARM64NotLessThanF,
		ssa.OpARM64NotLessEqualF,
		ssa.OpARM64NotGreaterThanF,
		ssa.OpARM64NotGreaterEqualF,
		ssa.OpARM64LessThanNoov,
		ssa.OpARM64GreaterEqualNoov:
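		// generate boolean values using CSET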
		p := s.Prog(arm64.ACSET)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[v.Op]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64PRFM:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredGetClosurePtr:
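		// Closure pointer is R26 (arm64.REGCTXT).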
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpARM64LoweredGetCallerSP:
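		// caller's SP is FixedFrameSize below the address of the first arg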
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DMB:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
	case ssa.OpARM64FlagConstant:
		v.Fatalf("FlagConstant op should never make it to codegen %v", v.LongString())
	case ssa.OpARM64InvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpClobber:
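		// MOVW	$0xdeaddead, REGTMP
		// MOVW	REGTMP, (slot)
		// MOVW	REGTMP, 4(slot)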
		p := s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0xdeaddead
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux(&p.To, v)
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux2(&p.To, v, v.AuxInt+4)
	case ssa.OpClobberReg:
		x := uint64(0xdeaddeaddeaddead)
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(x)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

var condBits = map[ssa.Op]arm64.SpecialOperand{
	ssa.OpARM64Equal:         arm64.SPOP_EQ,
	ssa.OpARM64NotEqual:      arm64.SPOP_NE,
	ssa.OpARM64LessThan:      arm64.SPOP_LT,
	ssa.OpARM64LessThanU:     arm64.SPOP_LO,
	ssa.OpARM64LessEqual:     arm64.SPOP_LE,
	ssa.OpARM64LessEqualU:    arm64.SPOP_LS,
	ssa.OpARM64GreaterThan:   arm64.SPOP_GT,
	ssa.OpARM64GreaterThanU:  arm64.SPOP_HI,
	ssa.OpARM64GreaterEqual:  arm64.SPOP_GE,
	ssa.OpARM64GreaterEqualU: arm64.SPOP_HS,
	ssa.OpARM64LessThanF:     arm64.SPOP_MI, // x < y
	ssa.OpARM64LessEqualF:    arm64.SPOP_LS, // x <= y
	ssa.OpARM64GreaterThanF:  arm64.SPOP_GT, // x > y
	ssa.OpARM64GreaterEqualF: arm64.SPOP_GE, // x >= y

	// The following condition codes are true when either operand is NaN,
	// so they implement the negated ("Not") floating-point comparisons.
	ssa.OpARM64NotLessThanF:     arm64.SPOP_PL, // x >= y || x is NaN || y is NaN
	ssa.OpARM64NotLessEqualF:    arm64.SPOP_HI, // x > y || x is NaN || y is NaN
	ssa.OpARM64NotGreaterThanF:  arm64.SPOP_LE, // x <= y || x is NaN || y is NaN
	ssa.OpARM64NotGreaterEqualF: arm64.SPOP_LT, // x < y || x is NaN || y is NaN

	ssa.OpARM64LessThanNoov:     arm64.SPOP_MI, // x < y, without honoring overflow
	ssa.OpARM64GreaterEqualNoov: arm64.SPOP_PL, // x >= y, without honoring overflow
}

var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockARM64EQ:     {arm64.ABEQ, arm64.ABNE},
	ssa.BlockARM64NE:     {arm64.ABNE, arm64.ABEQ},
	ssa.BlockARM64LT:     {arm64.ABLT, arm64.ABGE},
	ssa.BlockARM64GE:     {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64LE:     {arm64.ABLE, arm64.ABGT},
	ssa.BlockARM64GT:     {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64ULT:    {arm64.ABLO, arm64.ABHS},
	ssa.BlockARM64UGE:    {arm64.ABHS, arm64.ABLO},
	ssa.BlockARM64UGT:    {arm64.ABHI, arm64.ABLS},
	ssa.BlockARM64ULE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64Z:      {arm64.ACBZ, arm64.ACBNZ},
	ssa.BlockARM64NZ:     {arm64.ACBNZ, arm64.ACBZ},
	ssa.BlockARM64ZW:     {arm64.ACBZW, arm64.ACBNZW},
	ssa.BlockARM64NZW:    {arm64.ACBNZW, arm64.ACBZW},
	ssa.BlockARM64TBZ:    {arm64.ATBZ, arm64.ATBNZ},
	ssa.BlockARM64TBNZ:   {arm64.ATBNZ, arm64.ATBZ},
	ssa.BlockARM64FLT:    {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64FGE:    {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64FLE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64FGT:    {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64LTnoov: {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64GEnoov: {arm64.ABPL, arm64.ABMI},
}

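// To model a 'LEnoov' ('<=' without overflow checking) branching.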
var leJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABEQ, Index: 0}, {Jump: arm64.ABPL, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABMI, Index: 0}, {Jump: arm64.ABEQ, Index: 0}}, // next == b.Succs[1]
}

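// To model a 'GTnoov' ('>' without overflow checking) branching.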
var gtJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABMI, Index: 1}, {Jump: arm64.ABEQ, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABEQ, Index: 1}, {Jump: arm64.ABPL, Index: 0}}, // next == b.Succs[1]
}

func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockDefer:
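		// defer returns in R0:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call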
		p := s.Prog(arm64.ACMP)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0
		p.Reg = arm64.REG_R0
		p = s.Prog(arm64.ABNE)
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockExit, ssa.BlockRetJmp:

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockARM64EQ, ssa.BlockARM64NE,
		ssa.BlockARM64LT, ssa.BlockARM64GE,
		ssa.BlockARM64LE, ssa.BlockARM64GT,
		ssa.BlockARM64ULT, ssa.BlockARM64UGT,
		ssa.BlockARM64ULE, ssa.BlockARM64UGE,
		ssa.BlockARM64Z, ssa.BlockARM64NZ,
		ssa.BlockARM64ZW, ssa.BlockARM64NZW,
		ssa.BlockARM64FLT, ssa.BlockARM64FGE,
		ssa.BlockARM64FLE, ssa.BlockARM64FGT,
		ssa.BlockARM64LTnoov, ssa.BlockARM64GEnoov:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	case ssa.BlockARM64TBZ, ssa.BlockARM64TBNZ:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		p.From.Offset = b.AuxInt
		p.From.Type = obj.TYPE_CONST
		p.Reg = b.Controls[0].Reg()

	case ssa.BlockARM64LEnoov:
		s.CombJump(b, next, &leJumps)
	case ssa.BlockARM64GTnoov:
		s.CombJump(b, next, &gtJumps)

	case ssa.BlockARM64JUMPTABLE:
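		// MOVD	(TABLE)(IDX<<3), Rtmp
		// JMP	(Rtmp)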
		p := s.Prog(arm64.AMOVD)
		p.From = genIndexedOperand(ssa.OpARM64MOVDloadidx8, b.Controls[1].Reg(), b.Controls[0].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(obj.AJMP)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGTMP
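		// Save jump tables for later resolution of the target blocks.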
		s.JumpTables = append(s.JumpTables, b)

	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}

func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p := s.Prog(loadByType(t))
	p.From.Type = obj.TYPE_MEM
	p.From.Name = obj.NAME_AUTO
	p.From.Sym = n.Linksym()
	p.From.Offset = n.FrameOffset() + off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = reg
	return p
}

func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
	p.To.Name = obj.NAME_PARAM
	p.To.Sym = n.Linksym()
	p.Pos = p.Pos.WithNotStmt()
	return p
}