// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package mips64
6
7 import (
8 "math"
9
10 "cmd/compile/internal/base"
11 "cmd/compile/internal/ir"
12 "cmd/compile/internal/logopt"
13 "cmd/compile/internal/ssa"
14 "cmd/compile/internal/ssagen"
15 "cmd/compile/internal/types"
16 "cmd/internal/obj"
17 "cmd/internal/obj/mips"
18 )
19
20
21 func isFPreg(r int16) bool {
22 return mips.REG_F0 <= r && r <= mips.REG_F31
23 }
24
25
26 func isHILO(r int16) bool {
27 return r == mips.REG_HI || r == mips.REG_LO
28 }
29
30
31 func loadByType(t *types.Type, r int16) obj.As {
32 if isFPreg(r) {
33 if t.Size() == 4 {
34 return mips.AMOVF
35 } else {
36 return mips.AMOVD
37 }
38 } else {
39 switch t.Size() {
40 case 1:
41 if t.IsSigned() {
42 return mips.AMOVB
43 } else {
44 return mips.AMOVBU
45 }
46 case 2:
47 if t.IsSigned() {
48 return mips.AMOVH
49 } else {
50 return mips.AMOVHU
51 }
52 case 4:
53 if t.IsSigned() {
54 return mips.AMOVW
55 } else {
56 return mips.AMOVWU
57 }
58 case 8:
59 return mips.AMOVV
60 }
61 }
62 panic("bad load type")
63 }
64
65
66 func storeByType(t *types.Type, r int16) obj.As {
67 if isFPreg(r) {
68 if t.Size() == 4 {
69 return mips.AMOVF
70 } else {
71 return mips.AMOVD
72 }
73 } else {
74 switch t.Size() {
75 case 1:
76 return mips.AMOVB
77 case 2:
78 return mips.AMOVH
79 case 4:
80 return mips.AMOVW
81 case 8:
82 return mips.AMOVV
83 }
84 }
85 panic("bad store type")
86 }
87
// ssaGenValue emits the machine instructions (obj.Progs) for a single
// SSA value v, dispatching on v.Op.
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load to a special register, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store from a special register, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		// two-operand instructions: result = arg0 op arg1
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		// set-on-greater-than: note args are swapped relative to the case above
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		// register-immediate forms: result = arg0 op AuxInt
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		// FP compares set the FP condition flag; no destination register
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP
		//   when constant is large, tmp register may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		// store the hardwired zero register; avoids materializing 0
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg && mips.REG_R0 <= a.Reg() && a.Reg() <= mips.REG_R31 {
			// The extension is only elided when the value was loaded into an
			// integer register: such loads already sign/zero-extend, so a
			// matching-width extension of the same signedness is redundant.
			// (Loads into FP registers do not qualify.)
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64MOVWfpgp,
		ssa.OpMIPS64MOVWgpfp,
		ssa.OpMIPS64MOVVfpgp,
		ssa.OpMIPS64MOVVgpfp,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64ABSD,
		ssa.OpMIPS64SQRTF,
		ssa.OpMIPS64SQRTD:
		// unary register-to-register operations
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// Emitted loop (sz = largest store width dividing AuxInt):
		//	SUBV	$sz, R1
		//	MOV(V|W|H|B)	R0, sz(R1)
		//	ADDV	$sz, R1
		//	BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPS64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredMove:
		// Emitted loop (sz = largest move width dividing AuxInt):
		//	SUBV	$sz, R1
		//	MOV(V|W|H|B)	sz(R1), Rtmp
		//	MOV(V|W|H|B)	Rtmp, (R2)
		//	ADDV	$sz, R1
		//	ADDV	$sz, R2
		//	BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64CALLtail:
		s.TailCall(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need (1-indexed selection).
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
	case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		// plain load bracketed by SYNC barriers
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		// plain store bracketed by SYNC barriers
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicStore8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicStore32:
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		// atomic store of the zero register
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p) // retry from the MOVV if SC failed (REGTMP == 0)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV	Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV	Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		// result register holds the NEW value: redo the add after the loop
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV	$auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV	$auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAnd32,
		ssa.OpMIPS64LoweredAtomicOr32:
		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// MOVV	$0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV	Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -5(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1) // retry from LL if SC failed
		p6 := s.Prog(mips.ASYNC)
		p2.To.SetTarget(p6) // mismatch: skip the store, land on the SYNC
	case ssa.OpMIPS64LoweredNilCheck:
		// issue a load which will fault if arg is nil
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)  (BFPT for FPFlagFalse)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // branch target, in case p3 is the last instruction
		p2.To.SetTarget(p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is in a fixed register; just verify the allocation.
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobbering?
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}
822
// blockJump maps each conditional block kind to the branch instruction
// that takes its first successor (asm) and the inverted branch that
// takes its second successor (invasm).
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}
835
// ssaGenBlock emits the control-flow instructions that terminate block b.
// next is the block laid out immediately after b, so a fallthrough to
// next needs no jump.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// fallthrough to succ0: branch to succ1 on the inverted condition
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			// fallthrough to succ1: branch to succ0 on the condition
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			// neither successor follows: conditional branch plus unconditional
			// jump, ordered so the likely path is the fallthrough jump
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			// condition value lives in a register (FP blocks use the FP flag)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}
890