// src/runtime/preempt_loong64.s
// Code generated by mkpreempt.go; DO NOT EDIT.

#include "go_asm.h"
#include "textflag.h"

// asyncPreempt saves the complete user register state, calls
// asyncPreempt2, then restores state and resumes the interrupted code.
// NOSPLIT|NOFRAME: it runs below the normal stack-frame machinery, so it
// builds its frame by hand.
TEXT ·asyncPreempt(SB),NOSPLIT|NOFRAME,$0-0
	// On entry R1 (LR) holds the resume PC; the interrupted code's
	// original LR was pushed at 0(R3) by the preemption setup
	// (NOTE(review): presumably sigctxt.pushCall — the epilogue below
	// pops that extra 8-byte slot). Park the resume PC below SP and
	// open a 232-byte frame.
	MOVV R1, -232(R3)
	SUBV $232, R3
	// Save GPs
	// R0 (zero), R2, R3 (SP), R22 (g) and R30 (used as scratch in the
	// epilogue) are deliberately not saved.
	MOVV R4, 8(R3)
	MOVV R5, 16(R3)
	MOVV R6, 24(R3)
	MOVV R7, 32(R3)
	MOVV R8, 40(R3)
	MOVV R9, 48(R3)
	MOVV R10, 56(R3)
	MOVV R11, 64(R3)
	MOVV R12, 72(R3)
	MOVV R13, 80(R3)
	MOVV R14, 88(R3)
	MOVV R15, 96(R3)
	MOVV R16, 104(R3)
	MOVV R17, 112(R3)
	MOVV R18, 120(R3)
	MOVV R19, 128(R3)
	MOVV R20, 136(R3)
	MOVV R21, 144(R3)
	MOVV R23, 152(R3)
	MOVV R24, 160(R3)
	MOVV R25, 168(R3)
	MOVV R26, 176(R3)
	MOVV R27, 184(R3)
	MOVV R28, 192(R3)
	MOVV R29, 200(R3)
	MOVV R31, 208(R3)
	// Pack the eight 1-bit FCC condition-flag registers into successive
	// bytes of R5 (FCCn -> bits [8n+7:8n]) and spill the word to 216(R3).
	MOVV FCC0, R4
	BSTRINSV $7, R4, $0, R5
	MOVV FCC1, R4
	BSTRINSV $15, R4, $8, R5
	MOVV FCC2, R4
	BSTRINSV $23, R4, $16, R5
	MOVV FCC3, R4
	BSTRINSV $31, R4, $24, R5
	MOVV FCC4, R4
	BSTRINSV $39, R4, $32, R5
	MOVV FCC5, R4
	BSTRINSV $47, R4, $40, R5
	MOVV FCC6, R4
	BSTRINSV $55, R4, $48, R5
	MOVV FCC7, R4
	BSTRINSV $63, R4, $56, R5
	MOVV R5, 216(R3)
	// FP control/status register.
	MOVV FCSR0, R5
	MOVV R5, 224(R3)
	// Save extended register state to p.xRegs.scratch
	MOVV g_m(g), R4
	MOVV m_p(R4), R4
	ADDV $(p_xRegs+xRegPerP_scratch), R4, R4
	// Use the widest register file the CPU has: LASX (256-bit X regs)
	// if available, else LSX (128-bit V regs), else scalar FP (F regs).
	// Slots are 32 bytes apart so all three layouts share offsets.
	MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLASX(SB), R5
	BNE R5, saveLASX
	MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLSX(SB), R5
	BNE R5, saveLSX
saveFP:
	MOVD F0, 0(R4)
	MOVD F1, 32(R4)
	MOVD F2, 64(R4)
	MOVD F3, 96(R4)
	MOVD F4, 128(R4)
	MOVD F5, 160(R4)
	MOVD F6, 192(R4)
	MOVD F7, 224(R4)
	MOVD F8, 256(R4)
	MOVD F9, 288(R4)
	MOVD F10, 320(R4)
	MOVD F11, 352(R4)
	MOVD F12, 384(R4)
	MOVD F13, 416(R4)
	MOVD F14, 448(R4)
	MOVD F15, 480(R4)
	MOVD F16, 512(R4)
	MOVD F17, 544(R4)
	MOVD F18, 576(R4)
	MOVD F19, 608(R4)
	MOVD F20, 640(R4)
	MOVD F21, 672(R4)
	MOVD F22, 704(R4)
	MOVD F23, 736(R4)
	MOVD F24, 768(R4)
	MOVD F25, 800(R4)
	MOVD F26, 832(R4)
	MOVD F27, 864(R4)
	MOVD F28, 896(R4)
	MOVD F29, 928(R4)
	MOVD F30, 960(R4)
	MOVD F31, 992(R4)
	JMP preempt
saveLSX:
	VMOVQ V0, 0(R4)
	VMOVQ V1, 32(R4)
	VMOVQ V2, 64(R4)
	VMOVQ V3, 96(R4)
	VMOVQ V4, 128(R4)
	VMOVQ V5, 160(R4)
	VMOVQ V6, 192(R4)
	VMOVQ V7, 224(R4)
	VMOVQ V8, 256(R4)
	VMOVQ V9, 288(R4)
	VMOVQ V10, 320(R4)
	VMOVQ V11, 352(R4)
	VMOVQ V12, 384(R4)
	VMOVQ V13, 416(R4)
	VMOVQ V14, 448(R4)
	VMOVQ V15, 480(R4)
	VMOVQ V16, 512(R4)
	VMOVQ V17, 544(R4)
	VMOVQ V18, 576(R4)
	VMOVQ V19, 608(R4)
	VMOVQ V20, 640(R4)
	VMOVQ V21, 672(R4)
	VMOVQ V22, 704(R4)
	VMOVQ V23, 736(R4)
	VMOVQ V24, 768(R4)
	VMOVQ V25, 800(R4)
	VMOVQ V26, 832(R4)
	VMOVQ V27, 864(R4)
	VMOVQ V28, 896(R4)
	VMOVQ V29, 928(R4)
	VMOVQ V30, 960(R4)
	VMOVQ V31, 992(R4)
	JMP preempt
saveLASX:
	XVMOVQ X0, 0(R4)
	XVMOVQ X1, 32(R4)
	XVMOVQ X2, 64(R4)
	XVMOVQ X3, 96(R4)
	XVMOVQ X4, 128(R4)
	XVMOVQ X5, 160(R4)
	XVMOVQ X6, 192(R4)
	XVMOVQ X7, 224(R4)
	XVMOVQ X8, 256(R4)
	XVMOVQ X9, 288(R4)
	XVMOVQ X10, 320(R4)
	XVMOVQ X11, 352(R4)
	XVMOVQ X12, 384(R4)
	XVMOVQ X13, 416(R4)
	XVMOVQ X14, 448(R4)
	XVMOVQ X15, 480(R4)
	XVMOVQ X16, 512(R4)
	XVMOVQ X17, 544(R4)
	XVMOVQ X18, 576(R4)
	XVMOVQ X19, 608(R4)
	XVMOVQ X20, 640(R4)
	XVMOVQ X21, 672(R4)
	XVMOVQ X22, 704(R4)
	XVMOVQ X23, 736(R4)
	XVMOVQ X24, 768(R4)
	XVMOVQ X25, 800(R4)
	XVMOVQ X26, 832(R4)
	XVMOVQ X27, 864(R4)
	XVMOVQ X28, 896(R4)
	XVMOVQ X29, 928(R4)
	XVMOVQ X30, 960(R4)
	XVMOVQ X31, 992(R4)
preempt:
	CALL ·asyncPreempt2(SB)
	// Restore non-GPs from *p.xRegs.cache
	// (asyncPreempt2 may have moved/replaced the saved state; reload the
	// pointer rather than reusing the scratch address computed above).
	MOVV g_m(g), R4
	MOVV m_p(R4), R4
	MOVV (p_xRegs+xRegPerP_cache)(R4), R4
	MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLASX(SB), R5
	BNE R5, restoreLASX
	MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLSX(SB), R5
	BNE R5, restoreLSX
restoreFP:
	MOVD 992(R4), F31
	MOVD 960(R4), F30
	MOVD 928(R4), F29
	MOVD 896(R4), F28
	MOVD 864(R4), F27
	MOVD 832(R4), F26
	MOVD 800(R4), F25
	MOVD 768(R4), F24
	MOVD 736(R4), F23
	MOVD 704(R4), F22
	MOVD 672(R4), F21
	MOVD 640(R4), F20
	MOVD 608(R4), F19
	MOVD 576(R4), F18
	MOVD 544(R4), F17
	MOVD 512(R4), F16
	MOVD 480(R4), F15
	MOVD 448(R4), F14
	MOVD 416(R4), F13
	MOVD 384(R4), F12
	MOVD 352(R4), F11
	MOVD 320(R4), F10
	MOVD 288(R4), F9
	MOVD 256(R4), F8
	MOVD 224(R4), F7
	MOVD 192(R4), F6
	MOVD 160(R4), F5
	MOVD 128(R4), F4
	MOVD 96(R4), F3
	MOVD 64(R4), F2
	MOVD 32(R4), F1
	MOVD 0(R4), F0
	JMP restoreGPs
restoreLSX:
	VMOVQ 992(R4), V31
	VMOVQ 960(R4), V30
	VMOVQ 928(R4), V29
	VMOVQ 896(R4), V28
	VMOVQ 864(R4), V27
	VMOVQ 832(R4), V26
	VMOVQ 800(R4), V25
	VMOVQ 768(R4), V24
	VMOVQ 736(R4), V23
	VMOVQ 704(R4), V22
	VMOVQ 672(R4), V21
	VMOVQ 640(R4), V20
	VMOVQ 608(R4), V19
	VMOVQ 576(R4), V18
	VMOVQ 544(R4), V17
	VMOVQ 512(R4), V16
	VMOVQ 480(R4), V15
	VMOVQ 448(R4), V14
	VMOVQ 416(R4), V13
	VMOVQ 384(R4), V12
	VMOVQ 352(R4), V11
	VMOVQ 320(R4), V10
	VMOVQ 288(R4), V9
	VMOVQ 256(R4), V8
	VMOVQ 224(R4), V7
	VMOVQ 192(R4), V6
	VMOVQ 160(R4), V5
	VMOVQ 128(R4), V4
	VMOVQ 96(R4), V3
	VMOVQ 64(R4), V2
	VMOVQ 32(R4), V1
	VMOVQ 0(R4), V0
	JMP restoreGPs
restoreLASX:
	XVMOVQ 992(R4), X31
	XVMOVQ 960(R4), X30
	XVMOVQ 928(R4), X29
	XVMOVQ 896(R4), X28
	XVMOVQ 864(R4), X27
	XVMOVQ 832(R4), X26
	XVMOVQ 800(R4), X25
	XVMOVQ 768(R4), X24
	XVMOVQ 736(R4), X23
	XVMOVQ 704(R4), X22
	XVMOVQ 672(R4), X21
	XVMOVQ 640(R4), X20
	XVMOVQ 608(R4), X19
	XVMOVQ 576(R4), X18
	XVMOVQ 544(R4), X17
	XVMOVQ 512(R4), X16
	XVMOVQ 480(R4), X15
	XVMOVQ 448(R4), X14
	XVMOVQ 416(R4), X13
	XVMOVQ 384(R4), X12
	XVMOVQ 352(R4), X11
	XVMOVQ 320(R4), X10
	XVMOVQ 288(R4), X9
	XVMOVQ 256(R4), X8
	XVMOVQ 224(R4), X7
	XVMOVQ 192(R4), X6
	XVMOVQ 160(R4), X5
	XVMOVQ 128(R4), X4
	XVMOVQ 96(R4), X3
	XVMOVQ 64(R4), X2
	XVMOVQ 32(R4), X1
	XVMOVQ 0(R4), X0
	// Restore GPs
restoreGPs:
	MOVV 224(R3), R5
	MOVV R5, FCSR0
	// Unpack the FCC flags saved at 216(R3): byte n -> FCCn.
	MOVV 216(R3), R5
	BSTRPICKV $7, R5, $0, R4
	MOVV R4, FCC0
	BSTRPICKV $15, R5, $8, R4
	MOVV R4, FCC1
	BSTRPICKV $23, R5, $16, R4
	MOVV R4, FCC2
	BSTRPICKV $31, R5, $24, R4
	MOVV R4, FCC3
	BSTRPICKV $39, R5, $32, R4
	MOVV R4, FCC4
	BSTRPICKV $47, R5, $40, R4
	MOVV R4, FCC5
	BSTRPICKV $55, R5, $48, R4
	MOVV R4, FCC6
	BSTRPICKV $63, R5, $56, R4
	MOVV R4, FCC7
	MOVV 208(R3), R31
	MOVV 200(R3), R29
	MOVV 192(R3), R28
	MOVV 184(R3), R27
	MOVV 176(R3), R26
	MOVV 168(R3), R25
	MOVV 160(R3), R24
	MOVV 152(R3), R23
	MOVV 144(R3), R21
	MOVV 136(R3), R20
	MOVV 128(R3), R19
	MOVV 120(R3), R18
	MOVV 112(R3), R17
	MOVV 104(R3), R16
	MOVV 96(R3), R15
	MOVV 88(R3), R14
	MOVV 80(R3), R13
	MOVV 72(R3), R12
	MOVV 64(R3), R11
	MOVV 56(R3), R10
	MOVV 48(R3), R9
	MOVV 40(R3), R8
	MOVV 32(R3), R7
	MOVV 24(R3), R6
	MOVV 16(R3), R5
	MOVV 8(R3), R4
	// 232(R3) holds the interrupted code's original LR (pushed before
	// entry); 0(R3) holds the resume PC saved in the prologue. Pop the
	// frame plus that extra 8-byte slot (232+8=240) and jump back.
	MOVV 232(R3), R1
	MOVV (R3), R30
	ADDV $240, R3
	JMP (R30)