package riscv64

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/riscv"
	"internal/abi"
)

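// ssaRegToReg maps SSA register numbers to obj (assembler) register numbers.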
var ssaRegToReg = []int16{
	riscv.REG_X0,
	// X1 is LR (the link register) and is not allocated by regalloc.
	riscv.REG_X2,
	riscv.REG_X3,
	riscv.REG_X4,
	riscv.REG_X5,
	riscv.REG_X6,
	riscv.REG_X7,
	riscv.REG_X8,
	riscv.REG_X9,
	riscv.REG_X10,
	riscv.REG_X11,
	riscv.REG_X12,
	riscv.REG_X13,
	riscv.REG_X14,
	riscv.REG_X15,
	riscv.REG_X16,
	riscv.REG_X17,
	riscv.REG_X18,
	riscv.REG_X19,
	riscv.REG_X20,
	riscv.REG_X21,
	riscv.REG_X22,
	riscv.REG_X23,
	riscv.REG_X24,
	riscv.REG_X25,
	riscv.REG_X26,
	riscv.REG_X27,
	riscv.REG_X28,
	riscv.REG_X29,
	riscv.REG_X30,
	riscv.REG_X31,
	riscv.REG_F0,
	riscv.REG_F1,
	riscv.REG_F2,
	riscv.REG_F3,
	riscv.REG_F4,
	riscv.REG_F5,
	riscv.REG_F6,
	riscv.REG_F7,
	riscv.REG_F8,
	riscv.REG_F9,
	riscv.REG_F10,
	riscv.REG_F11,
	riscv.REG_F12,
	riscv.REG_F13,
	riscv.REG_F14,
	riscv.REG_F15,
	riscv.REG_F16,
	riscv.REG_F17,
	riscv.REG_F18,
	riscv.REG_F19,
	riscv.REG_F20,
	riscv.REG_F21,
	riscv.REG_F22,
	riscv.REG_F23,
	riscv.REG_F24,
	riscv.REG_F25,
	riscv.REG_F26,
	riscv.REG_F27,
	riscv.REG_F28,
	riscv.REG_F29,
	riscv.REG_F30,
	riscv.REG_F31,
	0, // SB is a pseudo-register and has no hardware register number.
}

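// loadByType returns the load instruction for a value of type t.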
func loadByType(t *types.Type) obj.As {
	width := t.Size()

	if t.IsFloat() {
		switch width {
		case 4:
			return riscv.AMOVF
		case 8:
			return riscv.AMOVD
		default:
			base.Fatalf("unknown float width for load %d in type %v", width, t)
			return 0
		}
	}

	switch width {
	case 1:
		if t.IsSigned() {
			return riscv.AMOVB
		} else {
			return riscv.AMOVBU
		}
	case 2:
		if t.IsSigned() {
			return riscv.AMOVH
		} else {
			return riscv.AMOVHU
		}
	case 4:
		if t.IsSigned() {
			return riscv.AMOVW
		} else {
			return riscv.AMOVWU
		}
	case 8:
		return riscv.AMOV
	default:
		base.Fatalf("unknown width for load %d in type %v", width, t)
		return 0
	}
}

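// storeByType returns the store instruction for a value of type t.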
func storeByType(t *types.Type) obj.As {
	width := t.Size()

	if t.IsFloat() {
		switch width {
		case 4:
			return riscv.AMOVF
		case 8:
			return riscv.AMOVD
		default:
			base.Fatalf("unknown float width for store %d in type %v", width, t)
			return 0
		}
	}

	switch width {
	case 1:
		return riscv.AMOVB
	case 2:
		return riscv.AMOVH
	case 4:
		return riscv.AMOVW
	case 8:
		return riscv.AMOV
	default:
		base.Fatalf("unknown width for store %d in type %v", width, t)
		return 0
	}
}

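// largestMove returns the widest move instruction that can be used given the
// alignment of the block being moved or zeroed, along with that instruction's
// size in bytes.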
func largestMove(alignment int64) (obj.As, int64) {
	switch {
	case alignment%8 == 0:
		return riscv.AMOV, 8
	case alignment%4 == 0:
		return riscv.AMOVW, 4
	case alignment%2 == 0:
		return riscv.AMOVH, 2
	default:
		return riscv.AMOVB, 1
	}
}

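// fracMovOps holds move instructions indexed by log2 of their operand size;
// they are used to copy or zero the unaligned tail of a block.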
var fracMovOps = []obj.As{riscv.AMOVB, riscv.AMOVH, riscv.AMOVW, riscv.AMOV}

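// ssaMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
// RISC-V has no condition-code register, so there is nothing to do.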
func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {}

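// ssaGenValue emits the machine instructions for a single SSA value.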
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	s.SetPos(v.Pos)

	switch v.Op {
	case ssa.OpInitMem:
		// memory arg needs no code
	case ssa.OpArg:
		// input args need no code
	case ssa.OpPhi:
		ssagen.CheckLoweredPhi(v)
	case ssa.OpCopy, ssa.OpRISCV64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if rs == rd {
			return
		}
		as := riscv.AMOV
		if v.Type.IsFloat() {
			as = riscv.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64MOVDnop:
		// nothing to emit; the value is already in the target register
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		// Record spill information for the register arguments so the
		// assembler can spill and unspill them around calls to morestack.
		for _, a := range v.Block.Func.RegArgs {
			addr := ssagen.SpillSlotAddr(a, riscv.REG_SP, base.Ctxt.Arch.FixedFrameSize)
			s.FuncInfo().AddSpill(
				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}
		v.Block.Func.RegArgs = nil

		ssagen.CheckArgReg(v)
	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
		// nothing to do
	case ssa.OpRISCV64MOVBreg, ssa.OpRISCV64MOVHreg, ssa.OpRISCV64MOVWreg,
		ssa.OpRISCV64MOVBUreg, ssa.OpRISCV64MOVHUreg, ssa.OpRISCV64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpRISCV64MOVDreg {
			a = a.Args[0]
		}
		as := v.Op.Asm()
		rs := v.Args[0].Reg()
		rd := v.Reg()
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpRISCV64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpRISCV64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// The load already produced a correctly extended value;
				// a plain move suffices.
				if rs == rd {
					return
				}
				as = riscv.AMOV
			default:
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rs
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rd
	case ssa.OpRISCV64ADD, ssa.OpRISCV64SUB, ssa.OpRISCV64SUBW, ssa.OpRISCV64XNOR, ssa.OpRISCV64XOR,
		ssa.OpRISCV64OR, ssa.OpRISCV64ORN, ssa.OpRISCV64AND, ssa.OpRISCV64ANDN,
		ssa.OpRISCV64SLL, ssa.OpRISCV64SLLW, ssa.OpRISCV64SRA, ssa.OpRISCV64SRAW, ssa.OpRISCV64SRL, ssa.OpRISCV64SRLW,
		ssa.OpRISCV64SLT, ssa.OpRISCV64SLTU, ssa.OpRISCV64MUL, ssa.OpRISCV64MULW, ssa.OpRISCV64MULH,
		ssa.OpRISCV64MULHU, ssa.OpRISCV64DIV, ssa.OpRISCV64DIVU, ssa.OpRISCV64DIVW,
		ssa.OpRISCV64DIVUW, ssa.OpRISCV64REM, ssa.OpRISCV64REMU, ssa.OpRISCV64REMW,
		ssa.OpRISCV64REMUW,
		ssa.OpRISCV64ROL, ssa.OpRISCV64ROLW, ssa.OpRISCV64ROR, ssa.OpRISCV64RORW,
		ssa.OpRISCV64FADDS, ssa.OpRISCV64FSUBS, ssa.OpRISCV64FMULS, ssa.OpRISCV64FDIVS,
		ssa.OpRISCV64FEQS, ssa.OpRISCV64FNES, ssa.OpRISCV64FLTS, ssa.OpRISCV64FLES,
		ssa.OpRISCV64FADDD, ssa.OpRISCV64FSUBD, ssa.OpRISCV64FMULD, ssa.OpRISCV64FDIVD,
		ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED, ssa.OpRISCV64FSGNJD,
		ssa.OpRISCV64MIN, ssa.OpRISCV64MAX, ssa.OpRISCV64MINU, ssa.OpRISCV64MAXU,
		ssa.OpRISCV64SH1ADD, ssa.OpRISCV64SH2ADD, ssa.OpRISCV64SH3ADD:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r

	case ssa.OpRISCV64LoweredFMAXD, ssa.OpRISCV64LoweredFMIND, ssa.OpRISCV64LoweredFMAXS, ssa.OpRISCV64LoweredFMINS:
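		// FMIN/FMAX return the non-NaN operand when exactly one input is NaN,
		// but Go requires NaN to propagate. Compute out = r0 + r1 first, which
		// is NaN if either input is NaN, then branch around the FMIN/FMAX when
		// either operand fails an FEQ self-comparison (i.e. is NaN).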
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg()
		add, feq := riscv.AFADDD, riscv.AFEQD
		if v.Op == ssa.OpRISCV64LoweredFMAXS || v.Op == ssa.OpRISCV64LoweredFMINS {
			add = riscv.AFADDS
			feq = riscv.AFEQS
		}

		p1 := s.Prog(add)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r0
		p1.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out

		p2 := s.Prog(feq)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r0
		p2.Reg = r0
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = riscv.REG_TMP

		p3 := s.Prog(riscv.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = riscv.REG_ZERO
		p3.Reg = riscv.REG_TMP
		p3.To.Type = obj.TYPE_BRANCH

		p4 := s.Prog(feq)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = r1
		p4.Reg = r1
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = riscv.REG_TMP

		p5 := s.Prog(riscv.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = riscv.REG_ZERO
		p5.Reg = riscv.REG_TMP
		p5.To.Type = obj.TYPE_BRANCH

		p6 := s.Prog(v.Op.Asm())
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = r1
		p6.Reg = r0
		p6.To.Type = obj.TYPE_REG
		p6.To.Reg = out

		nop := s.Prog(obj.ANOP)
		p3.To.SetTarget(nop)
		p5.To.SetTarget(nop)

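	// LoweredMuluhilo produces both halves of a 64x64->128 bit unsigned
	// multiply: MULHU gives the high word and MUL the low word.
	// LoweredMuluover additionally turns the high word into an overflow
	// flag with SNEZ.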
	case ssa.OpRISCV64LoweredMuluhilo:
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg1()
	case ssa.OpRISCV64LoweredMuluover:
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		p := s.Prog(riscv.AMULHU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg1()
		p1 := s.Prog(riscv.AMUL)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(riscv.ASNEZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Reg1()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg1()
	case ssa.OpRISCV64FMADDD, ssa.OpRISCV64FMSUBD, ssa.OpRISCV64FNMADDD, ssa.OpRISCV64FNMSUBD,
		ssa.OpRISCV64FMADDS, ssa.OpRISCV64FMSUBS, ssa.OpRISCV64FNMADDS, ssa.OpRISCV64FNMSUBS:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		r3 := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.AddRestSource(obj.Addr{Type: obj.TYPE_REG, Reg: r3})
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FABSD, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
		ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVXS, ssa.OpRISCV64FMVDX, ssa.OpRISCV64FMVXD,
		ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
		ssa.OpRISCV64FCVTDW, ssa.OpRISCV64FCVTDL, ssa.OpRISCV64FCVTWD, ssa.OpRISCV64FCVTLD, ssa.OpRISCV64FCVTDS, ssa.OpRISCV64FCVTSD,
		ssa.OpRISCV64FCLASSS, ssa.OpRISCV64FCLASSD,
		ssa.OpRISCV64NOT, ssa.OpRISCV64NEG, ssa.OpRISCV64NEGW, ssa.OpRISCV64CLZ, ssa.OpRISCV64CLZW, ssa.OpRISCV64CTZ, ssa.OpRISCV64CTZW,
		ssa.OpRISCV64REV8, ssa.OpRISCV64CPOP, ssa.OpRISCV64CPOPW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64ADDI, ssa.OpRISCV64ADDIW, ssa.OpRISCV64XORI, ssa.OpRISCV64ORI, ssa.OpRISCV64ANDI,
		ssa.OpRISCV64SLLI, ssa.OpRISCV64SLLIW, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRAIW,
		ssa.OpRISCV64SRLI, ssa.OpRISCV64SRLIW, ssa.OpRISCV64SLTI, ssa.OpRISCV64SLTIU,
		ssa.OpRISCV64RORI, ssa.OpRISCV64RORIW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64FMOVDconst, ssa.OpRISCV64FMOVFconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = v.AuxFloat()
		p.From.Name = obj.NAME_NONE
		p.From.Reg = obj.REG_NONE
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVaddr:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_ADDR
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// Check that the base register matches the kind of symbol in the aux field.
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No symbol: the address is a constant offset from SP.
			wantreg = "SP"
			p.From.Reg = riscv.REG_SP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpRISCV64MOVBload, ssa.OpRISCV64MOVHload, ssa.OpRISCV64MOVWload, ssa.OpRISCV64MOVDload,
		ssa.OpRISCV64MOVBUload, ssa.OpRISCV64MOVHUload, ssa.OpRISCV64MOVWUload,
		ssa.OpRISCV64FMOVWload, ssa.OpRISCV64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64MOVBstore, ssa.OpRISCV64MOVHstore, ssa.OpRISCV64MOVWstore, ssa.OpRISCV64MOVDstore,
		ssa.OpRISCV64FMOVWstore, ssa.OpRISCV64FMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64MOVBstorezero, ssa.OpRISCV64MOVHstorezero, ssa.OpRISCV64MOVWstorezero, ssa.OpRISCV64MOVDstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpRISCV64SEQZ, ssa.OpRISCV64SNEZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpRISCV64CALLstatic, ssa.OpRISCV64CALLclosure, ssa.OpRISCV64CALLinter:
		s.Call(v)
	case ssa.OpRISCV64CALLtail:
		s.TailCall(v)
	case ssa.OpRISCV64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries the write barrier needs.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

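	// The LoweredPanicBounds ops report an out-of-range index. They encode,
	// via a PCDATA entry, which registers (numbered relative to X5) or
	// constants hold the failing index and length/capacity, then call the
	// panicBounds runtime routine.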
	case ssa.OpRISCV64LoweredPanicBoundsRR, ssa.OpRISCV64LoweredPanicBoundsRC, ssa.OpRISCV64LoweredPanicBoundsCR, ssa.OpRISCV64LoweredPanicBoundsCC:
		// Compute the PCDATA constant describing the bounds failure.
		code, signed := ssa.BoundsKind(v.AuxInt).Code()
		xIsReg := false
		yIsReg := false
		xVal := 0
		yVal := 0
		switch v.Op {
		case ssa.OpRISCV64LoweredPanicBoundsRR:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
			yIsReg = true
			yVal = int(v.Args[1].Reg() - riscv.REG_X5)
		case ssa.OpRISCV64LoweredPanicBoundsRC:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move the constant to a register that doesn't clash with x.
				yIsReg = true
				if yVal == xVal {
					yVal = 1
				}
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(yVal)
			}
		case ssa.OpRISCV64LoweredPanicBoundsCR:
			yIsReg = true
			yVal = int(v.Args[0].Reg() - riscv.REG_X5)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move the constant to a register that doesn't clash with y.
				xIsReg = true
				if xVal == yVal {
					xVal = 1
				}
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(xVal)
			}
		case ssa.OpRISCV64LoweredPanicBoundsCC:
			c := v.Aux.(ssa.PanicBoundsCC).Cx
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move the constant to a register.
				xIsReg = true
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(xVal)
			}
			c = v.Aux.(ssa.PanicBoundsCC).Cy
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move the constant to a different register.
				yIsReg = true
				yVal = 1
				p := s.Prog(riscv.AMOV)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = riscv.REG_X5 + int16(yVal)
			}
		}
		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)

		p := s.Prog(obj.APCDATA)
		p.From.SetConst(abi.PCDATA_PanicBounds)
		p.To.SetConst(int64(c))
		p = s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.PanicBounds

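	// 8-bit atomic loads and stores have no dedicated instruction, so they
	// are bracketed by FENCEs around a plain byte MOV; 32/64-bit atomics use
	// LR and AMO instructions.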
	case ssa.OpRISCV64LoweredAtomicLoad8:
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVBU)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicLoad32, ssa.OpRISCV64LoweredAtomicLoad64:
		as := riscv.ALRW
		if v.Op == ssa.OpRISCV64LoweredAtomicLoad64 {
			as = riscv.ALRD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicStore8:
		s.Prog(riscv.AFENCE)
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(riscv.AFENCE)

	case ssa.OpRISCV64LoweredAtomicStore32, ssa.OpRISCV64LoweredAtomicStore64:
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicStore64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

	case ssa.OpRISCV64LoweredAtomicAdd32, ssa.OpRISCV64LoweredAtomicAdd64:
		as := riscv.AAMOADDW
		if v.Op == ssa.OpRISCV64LoweredAtomicAdd64 {
			as = riscv.AAMOADDD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_TMP

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = riscv.REG_TMP
		p2.Reg = v.Args[1].Reg()
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicExchange32, ssa.OpRISCV64LoweredAtomicExchange64:
		as := riscv.AAMOSWAPW
		if v.Op == ssa.OpRISCV64LoweredAtomicExchange64 {
			as = riscv.AAMOSWAPD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = v.Reg0()

	case ssa.OpRISCV64LoweredAtomicCas32, ssa.OpRISCV64LoweredAtomicCas64:
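		// Emit a classic LR/SC compare-and-swap loop. out starts at 0 and is
		// set to 1 only if the store-conditional succeeds:
		//	MOV	ZERO, out
		// loop:
		//	LR	(r0), TMP
		//	BNE	TMP, r1, end
		//	SC	r2, (r0), TMP
		//	BNE	TMP, ZERO, loop
		//	MOV	$1, out
		// end: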
		lr := riscv.ALRW
		sc := riscv.ASCW
		if v.Op == ssa.OpRISCV64LoweredAtomicCas64 {
			lr = riscv.ALRD
			sc = riscv.ASCD
		}

		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = riscv.REG_ZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out

		p1 := s.Prog(lr)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = r0
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = riscv.REG_TMP

		p2 := s.Prog(riscv.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = riscv.REG_TMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(sc)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = riscv.REG_TMP

		p4 := s.Prog(riscv.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = riscv.REG_TMP
		p4.Reg = riscv.REG_ZERO
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p1)

		p5 := s.Prog(riscv.AMOV)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = 1
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out

		p6 := s.Prog(obj.ANOP)
		p2.To.SetTarget(p6)

	case ssa.OpRISCV64LoweredAtomicAnd32, ssa.OpRISCV64LoweredAtomicOr32:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		p.RegTo2 = riscv.REG_ZERO

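	// LoweredZero clears n bytes starting at Args[0]: first with the widest
	// store the alignment allows, then with progressively narrower stores
	// for any remaining tail.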
	case ssa.OpRISCV64LoweredZero:
		ptr := v.Args[0].Reg()
		sc := v.AuxValAndOff()
		n := sc.Val64()

		mov, sz := largestMove(sc.Off64())

		var off int64
		for n >= sz {
			zeroOp(s, mov, ptr, off)
			off += sz
			n -= sz
		}

		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			zeroOp(s, fracMovOps[i], ptr, off)
			off += tsz
			n -= tsz
		}

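	// LoweredZeroLoop clears large blocks with an unrolled loop that zeroes
	// 8*sz bytes per iteration; tmp holds the address just past the last full
	// chunk. The remainder is then cleared the same way as LoweredZero.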
	case ssa.OpRISCV64LoweredZeroLoop:
		ptr := v.Args[0].Reg()
		sc := v.AuxValAndOff()
		n := sc.Val64()
		mov, sz := largestMove(sc.Off64())
		chunk := 8 * sz

		if n <= 3*chunk {
			v.Fatalf("ZeroLoop too small:%d, expect:%d", n, 3*chunk)
		}

		tmp := v.RegTmp()

		p := s.Prog(riscv.AADD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = n - n%chunk
		p.Reg = ptr
		p.To.Type = obj.TYPE_REG
		p.To.Reg = tmp

		for i := int64(0); i < 8; i++ {
			zeroOp(s, mov, ptr, sz*i)
		}

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = chunk
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = ptr

		p3 := s.Prog(riscv.ABNE)
		p3.From.Reg = tmp
		p3.From.Type = obj.TYPE_REG
		p3.Reg = ptr
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p.Link)

		n %= chunk

		var off int64
		for n >= sz {
			zeroOp(s, mov, ptr, off)
			off += sz
			n -= sz
		}

		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			zeroOp(s, fracMovOps[i], ptr, off)
			off += tsz
			n -= tsz
		}

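	// LoweredMove copies n bytes from Args[1] to Args[0] through the X5
	// temporary register, widest moves first, narrower ones for the tail.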
	case ssa.OpRISCV64LoweredMove:
		dst := v.Args[0].Reg()
		src := v.Args[1].Reg()
		if dst == src {
			break
		}

		sa := v.AuxValAndOff()
		n := sa.Val64()
		mov, sz := largestMove(sa.Off64())

		var off int64
		tmp := int16(riscv.REG_X5)
		for n >= sz {
			moveOp(s, mov, dst, src, tmp, off)
			off += sz
			n -= sz
		}

		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			moveOp(s, fracMovOps[i], dst, src, tmp, off)
			off += tsz
			n -= tsz
		}

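	// LoweredMoveLoop copies large blocks with an unrolled loop that moves
	// 8*sz bytes per iteration, using X5 as the data temporary and X6 as the
	// end-of-loop source address. The remainder is copied as in LoweredMove.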
	case ssa.OpRISCV64LoweredMoveLoop:
		dst := v.Args[0].Reg()
		src := v.Args[1].Reg()
		if dst == src {
			break
		}

		sc := v.AuxValAndOff()
		n := sc.Val64()
		mov, sz := largestMove(sc.Off64())
		chunk := 8 * sz

		if n <= 3*chunk {
			v.Fatalf("MoveLoop too small:%d, expect:%d", n, 3*chunk)
		}
		tmp := int16(riscv.REG_X5)

		p := s.Prog(riscv.AADD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = n - n%chunk
		p.Reg = src
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_X6

		for i := int64(0); i < 8; i++ {
			moveOp(s, mov, dst, src, tmp, sz*i)
		}

		p1 := s.Prog(riscv.AADD)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = chunk
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = src

		p2 := s.Prog(riscv.AADD)
		p2.From.Type = obj.TYPE_CONST
		p2.From.Offset = chunk
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = dst

		p3 := s.Prog(riscv.ABNE)
		p3.From.Reg = riscv.REG_X6
		p3.From.Type = obj.TYPE_REG
		p3.Reg = src
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p.Link)

		n %= chunk

		var off int64
		for n >= sz {
			moveOp(s, mov, dst, src, tmp, off)
			off += sz
			n -= sz
		}

		for i := len(fracMovOps) - 1; i >= 0; i-- {
			tsz := int64(1 << i)
			if n < tsz {
				continue
			}
			moveOp(s, fracMovOps[i], dst, src, tmp, off)
			off += tsz
			n -= tsz
		}

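	// LoweredNilCheck issues a one-byte load from the pointer; a nil pointer
	// faults and the runtime turns the fault into a panic.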
	case ssa.OpRISCV64LoweredNilCheck:
		p := s.Prog(riscv.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = riscv.REG_ZERO
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}

	case ssa.OpRISCV64LoweredGetClosurePtr:
		// The closure pointer is expected in the context register.
		ssagen.CheckLoweredGetClosurePtr(v)

	case ssa.OpRISCV64LoweredGetCallerSP:
		// The caller's SP is FixedFrameSize below the address of the first argument.
		p := s.Prog(riscv.AMOV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

	case ssa.OpRISCV64LoweredPubBarrier:
		// Emit a hardware memory barrier.
		s.Prog(v.Op.Asm())

	case ssa.OpRISCV64LoweredRound32F, ssa.OpRISCV64LoweredRound64F:
		// input is already rounded; nothing to do

	case ssa.OpClobber, ssa.OpClobberReg:
		// nothing to do

	default:
		v.Fatalf("Unhandled op %v", v.Op)
	}
}

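// blockBranch maps a riscv64 SSA block kind to its branch instruction.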
var blockBranch = [...]obj.As{
	ssa.BlockRISCV64BEQ:  riscv.ABEQ,
	ssa.BlockRISCV64BEQZ: riscv.ABEQZ,
	ssa.BlockRISCV64BGE:  riscv.ABGE,
	ssa.BlockRISCV64BGEU: riscv.ABGEU,
	ssa.BlockRISCV64BGEZ: riscv.ABGEZ,
	ssa.BlockRISCV64BGTZ: riscv.ABGTZ,
	ssa.BlockRISCV64BLEZ: riscv.ABLEZ,
	ssa.BlockRISCV64BLT:  riscv.ABLT,
	ssa.BlockRISCV64BLTU: riscv.ABLTU,
	ssa.BlockRISCV64BLTZ: riscv.ABLTZ,
	ssa.BlockRISCV64BNE:  riscv.ABNE,
	ssa.BlockRISCV64BNEZ: riscv.ABNEZ,
}

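// ssaGenBlock emits the control flow for block b. next is the block that will
// be laid out immediately after b, so a branch or jump to it can be omitted.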
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	s.SetPos(b.Pos)

	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BNEZ,
		ssa.BlockRISCV64BLT, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BGEZ,
		ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:

		as := blockBranch[b.Kind]
		invAs := riscv.InvertBranch(as)

		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(invAs, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(as, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(as, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(invAs, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}

		p.From.Type = obj.TYPE_REG
		switch b.Kind {
		case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BLT, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
			if b.NumControls() != 2 {
				b.Fatalf("Unexpected number of controls (%d != 2): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
			p.Reg = b.Controls[1].Reg()

		case ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNEZ, ssa.BlockRISCV64BGEZ, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ:
			if b.NumControls() != 1 {
				b.Fatalf("Unexpected number of controls (%d != 1): %s", b.NumControls(), b.LongString())
			}
			p.From.Reg = b.Controls[0].Reg()
		}

	default:
		b.Fatalf("Unhandled block: %s", b.LongString())
	}
}

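// loadRegResult loads the stack-allocated result n (at offset off within it)
// into register reg.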
func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p := s.Prog(loadByType(t))
	p.From.Type = obj.TYPE_MEM
	p.From.Name = obj.NAME_AUTO
	p.From.Sym = n.Linksym()
	p.From.Offset = n.FrameOffset() + off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = reg
	return p
}

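// spillArgReg appends an instruction after p that stores register reg to the
// stack slot of the parameter n at offset off.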
func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
	p.To.Name = obj.NAME_PARAM
	p.To.Sym = n.Linksym()
	p.Pos = p.Pos.WithNotStmt()
	return p
}

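// zeroOp emits a single store of ZERO of the width selected by mov to reg+off.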
func zeroOp(s *ssagen.State, mov obj.As, reg int16, off int64) {
	p := s.Prog(mov)
	p.From.Type = obj.TYPE_REG
	p.From.Reg = riscv.REG_ZERO
	p.To.Type = obj.TYPE_MEM
	p.To.Reg = reg
	p.To.Offset = off
	return
}

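// moveOp emits a load from src+off into tmp followed by a store of tmp to
// dst+off, using the move instruction mov for both.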
func moveOp(s *ssagen.State, mov obj.As, dst int16, src int16, tmp int16, off int64) {
	p := s.Prog(mov)
	p.From.Type = obj.TYPE_MEM
	p.From.Reg = src
	p.From.Offset = off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = tmp

	p1 := s.Prog(mov)
	p1.From.Type = obj.TYPE_REG
	p1.From.Reg = tmp
	p1.To.Type = obj.TYPE_MEM
	p1.To.Reg = dst
	p1.To.Offset = off

	return
}