Text file
src/runtime/preempt_amd64.s
1 // Code generated by mkpreempt.go; DO NOT EDIT.
2
3 #include "go_asm.h"
4 #include "go_tls.h"
5 #include "asm_amd64.h"
6 #include "textflag.h"
7
// asyncPreempt saves all user-visible register state (GPs, flags, and —
// under GOEXPERIMENT_simd — the vector/mask registers), calls into the
// runtime via asyncPreempt2, then restores that state and returns. It is
// injected at an arbitrary instruction boundary by signal-based
// preemption, so it may assume nothing about register contents on entry.
8 TEXT ·asyncPreempt(SB),NOSPLIT|NOFRAME,$0-0
9 PUSHQ BP
10 MOVQ SP, BP
11 // Save flags before clobbering them
12 PUSHFQ
13 // obj doesn't understand ADD/SUB on SP, but does understand ADJSP
14 ADJSP $112
15 // But vet doesn't know ADJSP, so suppress vet stack checking
16 NOP SP
// 112 bytes = 14 GPs x 8 bytes (AX..R15 below; BP was pushed
// separately above, SP is implicit).
17 // Save GPs
18 MOVQ AX, 0(SP)
19 MOVQ CX, 8(SP)
20 MOVQ DX, 16(SP)
21 MOVQ BX, 24(SP)
22 MOVQ SI, 32(SP)
23 MOVQ DI, 40(SP)
24 MOVQ R8, 48(SP)
25 MOVQ R9, 56(SP)
26 MOVQ R10, 64(SP)
27 MOVQ R11, 72(SP)
28 MOVQ R12, 80(SP)
29 MOVQ R13, 88(SP)
30 MOVQ R14, 96(SP)
31 MOVQ R15, 104(SP)
32 // Save extended register state to p.xRegs.scratch
33 // Don't make assumptions about ABI register state. See mkpreempt.go
34 get_tls(CX)
// R14 = g; the restore path after the CALL below reuses R14 as g, so
// this load is live across asyncPreempt2.
35 MOVQ g(CX), R14
36 MOVQ g_m(R14), AX
37 MOVQ m_p(AX), AX
// AX = &p.xRegs.scratch, the per-P extended-register save area.
38 LEAQ (p_xRegs+xRegPerP_scratch)(AX), AX
39 #ifdef GOEXPERIMENT_simd
// Dispatch to the widest save path the CPU supports; the CMPBs are why
// flags had to be saved with PUSHFQ above.
40 CMPB internal∕cpu·X86+const_offsetX86HasAVX512(SB), $1
41 JE saveAVX512
42 CMPB internal∕cpu·X86+const_offsetX86HasAVX2(SB), $1
43 JE saveAVX2
44 #endif
// Every vector register gets a 64-byte slot (offsets N*64) regardless
// of how much of it this path saves, so the X/Y/Z forms of register N
// all share one location in the save area.
45 saveSSE:
46 MOVUPS X0, 0(AX)
47 MOVUPS X1, 64(AX)
48 MOVUPS X2, 128(AX)
49 MOVUPS X3, 192(AX)
50 MOVUPS X4, 256(AX)
51 MOVUPS X5, 320(AX)
52 MOVUPS X6, 384(AX)
53 MOVUPS X7, 448(AX)
54 MOVUPS X8, 512(AX)
55 MOVUPS X9, 576(AX)
56 MOVUPS X10, 640(AX)
57 MOVUPS X11, 704(AX)
58 MOVUPS X12, 768(AX)
59 MOVUPS X13, 832(AX)
60 MOVUPS X14, 896(AX)
61 MOVUPS X15, 960(AX)
62 JMP preempt
63 saveAVX2:
64 VMOVDQU Y0, 0(AX)
65 VMOVDQU Y1, 64(AX)
66 VMOVDQU Y2, 128(AX)
67 VMOVDQU Y3, 192(AX)
68 VMOVDQU Y4, 256(AX)
69 VMOVDQU Y5, 320(AX)
70 VMOVDQU Y6, 384(AX)
71 VMOVDQU Y7, 448(AX)
72 VMOVDQU Y8, 512(AX)
73 VMOVDQU Y9, 576(AX)
74 VMOVDQU Y10, 640(AX)
75 VMOVDQU Y11, 704(AX)
76 VMOVDQU Y12, 768(AX)
77 VMOVDQU Y13, 832(AX)
78 VMOVDQU Y14, 896(AX)
79 VMOVDQU Y15, 960(AX)
80 JMP preempt
81 saveAVX512:
82 VMOVDQU64 Z0, 0(AX)
83 VMOVDQU64 Z1, 64(AX)
84 VMOVDQU64 Z2, 128(AX)
85 VMOVDQU64 Z3, 192(AX)
86 VMOVDQU64 Z4, 256(AX)
87 VMOVDQU64 Z5, 320(AX)
88 VMOVDQU64 Z6, 384(AX)
89 VMOVDQU64 Z7, 448(AX)
90 VMOVDQU64 Z8, 512(AX)
91 VMOVDQU64 Z9, 576(AX)
92 VMOVDQU64 Z10, 640(AX)
93 VMOVDQU64 Z11, 704(AX)
94 VMOVDQU64 Z12, 768(AX)
95 VMOVDQU64 Z13, 832(AX)
96 VMOVDQU64 Z14, 896(AX)
97 VMOVDQU64 Z15, 960(AX)
98 VMOVDQU64 Z16, 1024(AX)
99 VMOVDQU64 Z17, 1088(AX)
100 VMOVDQU64 Z18, 1152(AX)
101 VMOVDQU64 Z19, 1216(AX)
102 VMOVDQU64 Z20, 1280(AX)
103 VMOVDQU64 Z21, 1344(AX)
104 VMOVDQU64 Z22, 1408(AX)
105 VMOVDQU64 Z23, 1472(AX)
106 VMOVDQU64 Z24, 1536(AX)
107 VMOVDQU64 Z25, 1600(AX)
108 VMOVDQU64 Z26, 1664(AX)
109 VMOVDQU64 Z27, 1728(AX)
110 VMOVDQU64 Z28, 1792(AX)
111 VMOVDQU64 Z29, 1856(AX)
112 VMOVDQU64 Z30, 1920(AX)
// AVX-512 opmask registers K0-K7, 8 bytes each, stored after the 32
// ZMM slots (32*64 = 2048).
113 VMOVDQU64 Z31, 1984(AX)
114 KMOVQ K0, 2048(AX)
115 KMOVQ K1, 2056(AX)
116 KMOVQ K2, 2064(AX)
117 KMOVQ K3, 2072(AX)
118 KMOVQ K4, 2080(AX)
119 KMOVQ K5, 2088(AX)
120 KMOVQ K6, 2096(AX)
121 KMOVQ K7, 2104(AX)
122 JMP preempt
123 preempt:
// asyncPreempt2 performs the actual preemption (Go code). The restore
// sequence below depends on R14 still holding g across this call.
124 CALL ·asyncPreempt2(SB)
125 // Restore non-GPs from *p.xRegs.cache
// Note the asymmetry with the save path: state was written to
// p.xRegs.scratch (a value), but is reloaded through the
// p.xRegs.cache pointer — see mkpreempt.go for why.
126 MOVQ g_m(R14), AX
127 MOVQ m_p(AX), AX
128 MOVQ (p_xRegs+xRegPerP_cache)(AX), AX
129 #ifdef GOEXPERIMENT_simd
130 CMPB internal∕cpu·X86+const_offsetX86HasAVX512(SB), $1
131 JE restoreAVX512
132 CMPB internal∕cpu·X86+const_offsetX86HasAVX2(SB), $1
133 JE restoreAVX2
134 #endif
// Restores mirror the saves in reverse order, from the same N*64
// slot layout.
135 restoreSSE:
136 MOVUPS 960(AX), X15
137 MOVUPS 896(AX), X14
138 MOVUPS 832(AX), X13
139 MOVUPS 768(AX), X12
140 MOVUPS 704(AX), X11
141 MOVUPS 640(AX), X10
142 MOVUPS 576(AX), X9
143 MOVUPS 512(AX), X8
144 MOVUPS 448(AX), X7
145 MOVUPS 384(AX), X6
146 MOVUPS 320(AX), X5
147 MOVUPS 256(AX), X4
148 MOVUPS 192(AX), X3
149 MOVUPS 128(AX), X2
150 MOVUPS 64(AX), X1
151 MOVUPS 0(AX), X0
152 JMP restoreGPs
153 restoreAVX2:
154 VMOVDQU 960(AX), Y15
155 VMOVDQU 896(AX), Y14
156 VMOVDQU 832(AX), Y13
157 VMOVDQU 768(AX), Y12
158 VMOVDQU 704(AX), Y11
159 VMOVDQU 640(AX), Y10
160 VMOVDQU 576(AX), Y9
161 VMOVDQU 512(AX), Y8
162 VMOVDQU 448(AX), Y7
163 VMOVDQU 384(AX), Y6
164 VMOVDQU 320(AX), Y5
165 VMOVDQU 256(AX), Y4
166 VMOVDQU 192(AX), Y3
167 VMOVDQU 128(AX), Y2
168 VMOVDQU 64(AX), Y1
169 VMOVDQU 0(AX), Y0
170 JMP restoreGPs
171 restoreAVX512:
172 KMOVQ 2104(AX), K7
173 KMOVQ 2096(AX), K6
174 KMOVQ 2088(AX), K5
175 KMOVQ 2080(AX), K4
176 KMOVQ 2072(AX), K3
177 KMOVQ 2064(AX), K2
178 KMOVQ 2056(AX), K1
179 KMOVQ 2048(AX), K0
180 VMOVDQU64 1984(AX), Z31
181 VMOVDQU64 1920(AX), Z30
182 VMOVDQU64 1856(AX), Z29
183 VMOVDQU64 1792(AX), Z28
184 VMOVDQU64 1728(AX), Z27
185 VMOVDQU64 1664(AX), Z26
186 VMOVDQU64 1600(AX), Z25
187 VMOVDQU64 1536(AX), Z24
188 VMOVDQU64 1472(AX), Z23
189 VMOVDQU64 1408(AX), Z22
190 VMOVDQU64 1344(AX), Z21
191 VMOVDQU64 1280(AX), Z20
192 VMOVDQU64 1216(AX), Z19
193 VMOVDQU64 1152(AX), Z18
194 VMOVDQU64 1088(AX), Z17
195 VMOVDQU64 1024(AX), Z16
196 VMOVDQU64 960(AX), Z15
197 VMOVDQU64 896(AX), Z14
198 VMOVDQU64 832(AX), Z13
199 VMOVDQU64 768(AX), Z12
200 VMOVDQU64 704(AX), Z11
201 VMOVDQU64 640(AX), Z10
202 VMOVDQU64 576(AX), Z9
203 VMOVDQU64 512(AX), Z8
204 VMOVDQU64 448(AX), Z7
205 VMOVDQU64 384(AX), Z6
206 VMOVDQU64 320(AX), Z5
207 VMOVDQU64 256(AX), Z4
208 VMOVDQU64 192(AX), Z3
209 VMOVDQU64 128(AX), Z2
210 VMOVDQU64 64(AX), Z1
211 VMOVDQU64 0(AX), Z0
212 JMP restoreGPs
213 restoreGPs:
214 // Restore GPs
// Reverse order of the saves above; AX comes last because it held the
// register-state pointer until now.
215 MOVQ 104(SP), R15
216 MOVQ 96(SP), R14
217 MOVQ 88(SP), R13
218 MOVQ 80(SP), R12
219 MOVQ 72(SP), R11
220 MOVQ 64(SP), R10
221 MOVQ 56(SP), R9
222 MOVQ 48(SP), R8
223 MOVQ 40(SP), DI
224 MOVQ 32(SP), SI
225 MOVQ 24(SP), BX
226 MOVQ 16(SP), DX
227 MOVQ 8(SP), CX
228 MOVQ 0(SP), AX
// Unwind exactly what the prologue set up: frame, flags, then BP.
229 ADJSP $-112
230 POPFQ
231 POPQ BP
232 RET
233
View as plain text