/*
 * arch/ia64/vmx/optvfault.S
 * optimize virtualization fault handler
 *
 * Copyright (C) 2006 Intel Co
 *	Xuefei Xu (Anthony Xu) <anthony.xu@intel.com>
 */

#include <asm/asmmacro.h>
#include <asm/processor.h>

#include "vti.h"
#include "asm-offsets.h"

#define ACCE_MOV_FROM_AR
#define ACCE_MOV_FROM_RR
#define ACCE_MOV_TO_RR
#define ACCE_RSM
#define ACCE_SSM
#define ACCE_MOV_TO_PSR
#define ACCE_THASH

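/*
 * kvm_vps_entry: common trampoline into the PAL VPS services.
 * r21 is assumed to hold the vcpu pointer throughout this file; r30
 * carries the offset of the requested service, so the branch target
 * is vcpu->arch.vsa_base + r30.
 */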
ENTRY(kvm_vps_entry)
	adds r29 = VMM_VCPU_VSA_BASE_OFFSET,r21
	;;
	ld8 r29 = [r29]
	;;
	add r29 = r29, r30
	;;
	mov b0 = r29
	br.sptk.many b0
END(kvm_vps_entry)

/*
 * Inputs:
 *	r24 : return address
 *	r25 : vpd
 *	r29 : scratch
 *
 */
GLOBAL_ENTRY(kvm_vps_sync_read)
	movl r30 = PAL_VPS_SYNC_READ
	;;
	br.sptk.many kvm_vps_entry
END(kvm_vps_sync_read)

/*
 * Inputs:
 *	r24 : return address
 *	r25 : vpd
 *	r29 : scratch
 *
 */
GLOBAL_ENTRY(kvm_vps_sync_write)
	movl r30 = PAL_VPS_SYNC_WRITE
	;;
	br.sptk.many kvm_vps_entry
END(kvm_vps_sync_write)

/*
 * Inputs:
 *	r23 : pr
 *	r24 : guest b0
 *	r25 : vpd
 *
 */
GLOBAL_ENTRY(kvm_vps_resume_normal)
	movl r30 = PAL_VPS_RESUME_NORMAL
	;;
	mov pr=r23,-2
	br.sptk.many kvm_vps_entry
END(kvm_vps_resume_normal)

/*
 * Inputs:
 *	r23 : pr
 *	r24 : guest b0
 *	r25 : vpd
 *	r17 : isr
 */
GLOBAL_ENTRY(kvm_vps_resume_handler)
	movl r30 = PAL_VPS_RESUME_HANDLER
	;;
	ld8 r27=[r25]
	shr r17=r17,IA64_ISR_IR_BIT
	;;
	dep r27=r17,r27,63,1	// bit 63 of r27 indicates whether CFLE is enabled
	mov pr=r23,-2
	br.sptk.many kvm_vps_entry
END(kvm_vps_resume_handler)

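/*
 * Fast path for reading ar.itc (the only AR handled here): read the
 * host ar.itc, add the per-vcpu ITC offset, record the result as
 * last_itc, then branch into the asm_mov_to_reg table to deliver it
 * to the target GR named by the opcode in r25.
 */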
//mov r1=ar3
GLOBAL_ENTRY(kvm_asm_mov_from_ar)
#ifndef ACCE_MOV_FROM_AR
	br.many kvm_virtualization_fault_back
#endif
	add r18=VMM_VCPU_ITC_OFS_OFFSET, r21
	add r16=VMM_VCPU_LAST_ITC_OFFSET,r21
	extr.u r17=r25,6,7
	;;
	ld8 r18=[r18]
	mov r19=ar.itc
	mov r24=b0
	;;
	add r19=r19,r18
	addl r20=@gprel(asm_mov_to_reg),gp
	;;
	st8 [r16] = r19
	adds r30=kvm_resume_to_guest-asm_mov_to_reg,r20
	shladd r17=r17,4,r20
	;;
	mov b0=r17
	br.sptk.few b0
	;;
END(kvm_asm_mov_from_ar)

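/*
 * Fast path for "mov rX=rr[rY]": fetch the guest rY through the
 * asm_mov_from_reg table, index vcpu->arch.vrr[] with its top three
 * bits (the VRN), and return the virtual RR value via asm_mov_to_reg.
 */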
// mov r1=rr[r3]
GLOBAL_ENTRY(kvm_asm_mov_from_rr)
#ifndef ACCE_MOV_FROM_RR
	br.many kvm_virtualization_fault_back
#endif
	extr.u r16=r25,20,7
	extr.u r17=r25,6,7
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_from_rr_back_1-asm_mov_from_reg,r20
	shladd r16=r16,4,r20
	mov r24=b0
	;;
	add r27=VMM_VCPU_VRR0_OFFSET,r21
	mov b0=r16
	br.many b0
	;;
kvm_asm_mov_from_rr_back_1:
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	adds r22=asm_mov_to_reg-asm_mov_from_reg,r20
	shr.u r26=r19,61
	;;
	shladd r17=r17,4,r22
	shladd r27=r26,3,r27
	;;
	ld8 r19=[r27]
	mov b0=r17
	br.many b0
END(kvm_asm_mov_from_rr)

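/*
 * Fast path for "mov rr[rY]=rX": rr6 is punted back to the full
 * C handler. Otherwise the guest value is stored in vcpu->arch.vrr[],
 * re-encoded with the host rid/ps/ve fields, and, unless the vcpu is
 * in metaphysical mode, loaded into the real region register too.
 */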
// mov rr[r3]=r2
GLOBAL_ENTRY(kvm_asm_mov_to_rr)
#ifndef ACCE_MOV_TO_RR
	br.many kvm_virtualization_fault_back
#endif
	extr.u r16=r25,20,7
	extr.u r17=r25,13,7
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_to_rr_back_1-asm_mov_from_reg,r20
	shladd r16=r16,4,r20
	mov r22=b0
	;;
	add r27=VMM_VCPU_VRR0_OFFSET,r21
	mov b0=r16
	br.many b0
	;;
kvm_asm_mov_to_rr_back_1:
	adds r30=kvm_asm_mov_to_rr_back_2-asm_mov_from_reg,r20
	shr.u r23=r19,61
	shladd r17=r17,4,r20
	;;
	//if rr6, go back
	cmp.eq p6,p0=6,r23
	mov b0=r22
	(p6) br.cond.dpnt.many kvm_virtualization_fault_back
	;;
	mov r28=r19
	mov b0=r17
	br.many b0
kvm_asm_mov_to_rr_back_2:
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	shladd r27=r23,3,r27
	;; // vrr.rid<<4 |0xe
	st8 [r27]=r19
	mov b0=r30
	;;
	extr.u r16=r19,8,26
	extr.u r18 =r19,2,6
	mov r17 =0xe
	;;
	shladd r16 = r16, 4, r17
	extr.u r19 =r19,0,8
	;;
	shl r16 = r16,8
	;;
	add r19 = r19, r16
	;; //set ve 1
	dep r19=-1,r19,0,1
	cmp.lt p6,p0=14,r18
	;;
	(p6) mov r18=14
	;;
	(p6) dep r19=r18,r19,2,6
	;;
	cmp.eq p6,p0=0,r23
	;;
	cmp.eq.or p6,p0=4,r23
	;;
	adds r16=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	(p6) adds r17=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	;;
	ld4 r16=[r16]
	cmp.eq p7,p0=r0,r0
	(p6) shladd r17=r23,1,r17
	;;
	(p6) st8 [r17]=r19
	(p6) tbit.nz p6,p7=r16,0
	;;
	(p7) mov rr[r28]=r19
	mov r24=r22
	br.many b0
END(kvm_asm_mov_to_rr)

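/*
 * Fast path for "rsm imm24": clear the requested bits in both the
 * virtual PSR and cr.ipsr. If psr.dt is among the cleared bits and
 * the vcpu is not already in metaphysical mode, switch rr0/rr4 to
 * the metaphysical values kept at VMM_VCPU_META_RR0_OFFSET.
 */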
//rsm
GLOBAL_ENTRY(kvm_asm_rsm)
#ifndef ACCE_RSM
	br.many kvm_virtualization_fault_back
#endif
	add r16=VMM_VPD_BASE_OFFSET,r21
	extr.u r26=r25,6,21
	extr.u r27=r25,31,2
	;;
	ld8 r16=[r16]
	extr.u r28=r25,36,1
	dep r26=r27,r26,21,2
	;;
	add r17=VPD_VPSR_START_OFFSET,r16
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	//r26 is imm24
	dep r26=r28,r26,23,1
	;;
	ld8 r18=[r17]
	movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI
	ld4 r23=[r22]
	sub r27=-1,r26
	mov r24=b0
	;;
	mov r20=cr.ipsr
	or r28=r27,r28
	and r19=r18,r27
	;;
	st8 [r17]=r19
	and r20=r20,r28
	/* Commented out due to lack of fp lazy algorithm support
	adds r27=IA64_VCPU_FP_PSR_OFFSET,r21
	;;
	ld8 r27=[r27]
	;;
	tbit.nz p8,p0= r27,IA64_PSR_DFH_BIT
	;;
	(p8) dep r20=-1,r20,IA64_PSR_DFH_BIT,1
	*/
	;;
	mov cr.ipsr=r20
	tbit.nz p6,p0=r23,0
	;;
	tbit.z.or p6,p0=r26,IA64_PSR_DT_BIT
	(p6) br.dptk kvm_resume_to_guest
	;;
	add r26=VMM_VCPU_META_RR0_OFFSET,r21
	add r27=VMM_VCPU_META_RR0_OFFSET+8,r21
	dep r23=-1,r23,0,1
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	br.many kvm_resume_to_guest
END(kvm_asm_rsm)

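/*
 * Fast path for "ssm imm24": set the requested bits in the virtual
 * PSR and cr.ipsr. When psr.dt/rt/it all become set while the vcpu
 * was in metaphysical mode, the saved virtual rr0/rr4 are restored.
 * If setting psr.i uncovers a pending interrupt (vhpi above the vtpr
 * threshold), the virtual external interrupt is dispatched.
 */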
//ssm
GLOBAL_ENTRY(kvm_asm_ssm)
#ifndef ACCE_SSM
	br.many kvm_virtualization_fault_back
#endif
	add r16=VMM_VPD_BASE_OFFSET,r21
	extr.u r26=r25,6,21
	extr.u r27=r25,31,2
	;;
	ld8 r16=[r16]
	extr.u r28=r25,36,1
	dep r26=r27,r26,21,2
	;; //r26 is imm24
	add r27=VPD_VPSR_START_OFFSET,r16
	dep r26=r28,r26,23,1
	;; //r19 vpsr
	ld8 r29=[r27]
	mov r24=b0
	;;
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	mov r20=cr.ipsr
	or r19=r29,r26
	;;
	ld4 r23=[r22]
	st8 [r27]=r19
	or r20=r20,r26
	;;
	mov cr.ipsr=r20
	movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
	;;
	and r19=r28,r19
	tbit.z p6,p0=r23,0
	;;
	cmp.ne.or p6,p0=r28,r19
	(p6) br.dptk kvm_asm_ssm_1
	;;
	add r26=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	add r27=VMM_VCPU_META_SAVED_RR0_OFFSET+8,r21
	dep r23=0,r23,0,1
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	;;
kvm_asm_ssm_1:
	tbit.nz p6,p0=r29,IA64_PSR_I_BIT
	;;
	tbit.z.or p6,p0=r19,IA64_PSR_I_BIT
	(p6) br.dptk kvm_resume_to_guest
	;;
	add r29=VPD_VTPR_START_OFFSET,r16
	add r30=VPD_VHPI_START_OFFSET,r16
	;;
	ld8 r29=[r29]
	ld8 r30=[r30]
	;;
	extr.u r17=r29,4,4
	extr.u r18=r29,16,1
	;;
	dep r17=r18,r17,4,1
	;;
	cmp.gt p6,p0=r30,r17
	(p6) br.dpnt.few kvm_asm_dispatch_vexirq
	br.many kvm_resume_to_guest
END(kvm_asm_ssm)

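/*
 * Fast path for "mov psr.l=rX": merge the guest value into the low
 * 32 bits of the virtual PSR and cr.ipsr, switch between metaphysical
 * and virtual rr0/rr4 when the dt/rt/it bits flip, and, as in ssm,
 * dispatch a virtual external interrupt if enabling psr.i exposes one.
 */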
//mov psr.l=r2
GLOBAL_ENTRY(kvm_asm_mov_to_psr)
#ifndef ACCE_MOV_TO_PSR
	br.many kvm_virtualization_fault_back
#endif
	add r16=VMM_VPD_BASE_OFFSET,r21
	extr.u r26=r25,13,7 //r2
	;;
	ld8 r16=[r16]
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_mov_to_psr_back-asm_mov_from_reg,r20
	shladd r26=r26,4,r20
	mov r24=b0
	;;
	add r27=VPD_VPSR_START_OFFSET,r16
	mov b0=r26
	br.many b0
	;;
kvm_asm_mov_to_psr_back:
	ld8 r17=[r27]
	add r22=VMM_VCPU_MODE_FLAGS_OFFSET,r21
	dep r19=0,r19,32,32
	;;
	ld4 r23=[r22]
	dep r18=0,r17,0,32
	;;
	add r30=r18,r19
	movl r28=IA64_PSR_DT+IA64_PSR_RT+IA64_PSR_IT
	;;
	st8 [r27]=r30
	and r27=r28,r30
	and r29=r28,r17
	;;
	cmp.eq p5,p0=r29,r27
	cmp.eq p6,p7=r28,r27
	(p5) br.many kvm_asm_mov_to_psr_1
	;;
	//virtual to physical
	(p7) add r26=VMM_VCPU_META_RR0_OFFSET,r21
	(p7) add r27=VMM_VCPU_META_RR0_OFFSET+8,r21
	(p7) dep r23=-1,r23,0,1
	;;
	//physical to virtual
	(p6) add r26=VMM_VCPU_META_SAVED_RR0_OFFSET,r21
	(p6) add r27=VMM_VCPU_META_SAVED_RR0_OFFSET+8,r21
	(p6) dep r23=0,r23,0,1
	;;
	ld8 r26=[r26]
	ld8 r27=[r27]
	st4 [r22]=r23
	dep.z r28=4,61,3
	;;
	mov rr[r0]=r26
	;;
	mov rr[r28]=r27
	;;
	srlz.d
	;;
kvm_asm_mov_to_psr_1:
	mov r20=cr.ipsr
	movl r28=IA64_PSR_IC+IA64_PSR_I+IA64_PSR_DT+IA64_PSR_SI+IA64_PSR_RT
	;;
	or r19=r19,r28
	dep r20=0,r20,0,32
	;;
	add r20=r19,r20
	mov b0=r24
	;;
	/* Commented out due to lack of fp lazy algorithm support
	adds r27=IA64_VCPU_FP_PSR_OFFSET,r21
	;;
	ld8 r27=[r27]
	;;
	tbit.nz p8,p0=r27,IA64_PSR_DFH_BIT
	;;
	(p8) dep r20=-1,r20,IA64_PSR_DFH_BIT,1
	;;
	*/
	mov cr.ipsr=r20
	cmp.ne p6,p0=r0,r0
	;;
	tbit.nz.or p6,p0=r17,IA64_PSR_I_BIT
	tbit.z.or p6,p0=r30,IA64_PSR_I_BIT
	(p6) br.dpnt.few kvm_resume_to_guest
	;;
	add r29=VPD_VTPR_START_OFFSET,r16
	add r30=VPD_VHPI_START_OFFSET,r16
	;;
	ld8 r29=[r29]
	ld8 r30=[r30]
	;;
	extr.u r17=r29,4,4
	extr.u r18=r29,16,1
	;;
	dep r17=r18,r17,4,1
	;;
	cmp.gt p6,p0=r30,r17
	(p6) br.dpnt.few kvm_asm_dispatch_vexirq
	br.many kvm_resume_to_guest
END(kvm_asm_mov_to_psr)

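/*
 * kvm_asm_dispatch_vexirq: advance ipsr.ri (and iip, when wrapping
 * past slot 2) over the emulated instruction, then enter the C-level
 * virtual external interrupt dispatcher with r30 = 1.
 */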
ENTRY(kvm_asm_dispatch_vexirq)
//increment iip
	mov r16=cr.ipsr
	;;
	extr.u r17=r16,IA64_PSR_RI_BIT,2
	tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1
	;;
	(p6) mov r18=cr.iip
	(p6) mov r17=r0
	(p7) add r17=1,r17
	;;
	(p6) add r18=0x10,r18
	dep r16=r17,r16,IA64_PSR_RI_BIT,2
	;;
	(p6) mov cr.iip=r18
	mov cr.ipsr=r16
	mov r30 =1
	br.many kvm_dispatch_vexirq
END(kvm_asm_dispatch_vexirq)

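/*
 * Short-format VHPT hash (pta.vf == 0), as computed step by step in
 * the comments below:
 *	pval = (vadr & VRN_MASK)
 *	     | (((pta << 3) >> (pta.size + 3)) << pta.size)
 *	     | (((vadr >> rr.ps) << 3) & ((1UL << pta.size) - 1))
 */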
// thash
// TODO: add support when pta.vf = 1
GLOBAL_ENTRY(kvm_asm_thash)
#ifndef ACCE_THASH
	br.many kvm_virtualization_fault_back
#endif
	extr.u r17=r25,20,7		// get r3 from opcode in r25
	extr.u r18=r25,6,7		// get r1 from opcode in r25
	addl r20=@gprel(asm_mov_from_reg),gp
	;;
	adds r30=kvm_asm_thash_back1-asm_mov_from_reg,r20
	shladd r17=r17,4,r20		// get addr of MOVE_FROM_REG(r17)
	adds r16=VMM_VPD_BASE_OFFSET,r21	// get vcpu.arch.priveregs
	;;
	mov r24=b0
	;;
	ld8 r16=[r16]			// get VPD addr
	mov b0=r17
	br.many b0			// r19 return value
	;;
kvm_asm_thash_back1:
	shr.u r23=r19,61		// get RR number
	adds r25=VMM_VCPU_VRR0_OFFSET,r21	// get vcpu->arch.vrr[0]'s addr
	adds r16=VMM_VPD_VPTA_OFFSET,r16	// get vpta
	;;
	shladd r27=r23,3,r25		// get vcpu->arch.vrr[r23]'s addr
	ld8 r17=[r16]			// get PTA
	mov r26=1
	;;
	extr.u r29=r17,2,6		// get pta.size
	ld8 r25=[r27]			// get vcpu->arch.vrr[r23]'s value
	;;
	extr.u r25=r25,2,6		// get rr.ps
	shl r22=r26,r29			// 1UL << pta.size
	;;
	shr.u r23=r19,r25		// vaddr >> rr.ps
	adds r26=3,r29			// pta.size + 3
	shl r27=r17,3			// pta << 3
	;;
	shl r23=r23,3			// (vaddr >> rr.ps) << 3
	shr.u r27=r27,r26		// (pta << 3) >> (pta.size+3)
	movl r16=7<<61
	;;
	adds r22=-1,r22			// (1UL << pta.size) - 1
	shl r27=r27,r29			// ((pta<<3)>>(pta.size+3))<<pta.size
	and r19=r19,r16			// vaddr & VRN_MASK
	;;
	and r22=r22,r23			// vhpt_offset
	or r19=r19,r27			// (vadr&VRN_MASK)|(((pta<<3)>>(pta.size + 3))<<pta.size)
	adds r26=asm_mov_to_reg-asm_mov_from_reg,r20
	;;
	or r19=r19,r22			// calc pval
	shladd r17=r18,4,r26
	adds r30=kvm_resume_to_guest-asm_mov_from_reg,r20
	;;
	mov b0=r17
	br.many b0
END(kvm_asm_thash)

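/*
 * Register move jump tables. Each MOV_TO_REG(n)/MOV_FROM_REG(n) entry
 * below is exactly one 16-byte bundle, which is why callers index the
 * tables with "shladd rX=rX,4,table_base" (entry = base + reg * 16).
 */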
#define MOV_TO_REG0		\
{;				\
	nop.b 0x0;		\
	nop.b 0x0;		\
	nop.b 0x0;		\
	;;			\
};


#define MOV_TO_REG(n)		\
{;				\
	mov r##n##=r19;		\
	mov b0=r30;		\
	br.sptk.many b0;	\
	;;			\
};


#define MOV_FROM_REG(n)		\
{;				\
	mov r19=r##n##;		\
	mov b0=r30;		\
	br.sptk.many b0;	\
	;;			\
};


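/*
 * r16-r31 are banked: the VMM runs on bank 0 while the guest's values
 * live in bank 1, so these helpers switch briefly with bsw.1/bsw.0
 * and shuttle the value through r2, preserving the old r2 in r26.
 */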
#define MOV_TO_BANK0_REG(n)			\
ENTRY_MIN_ALIGN(asm_mov_to_bank0_reg##n##);	\
{;						\
	mov r26=r2;				\
	mov r2=r19;				\
	bsw.1;					\
	;;					\
};						\
{;						\
	mov r##n##=r2;				\
	nop.b 0x0;				\
	bsw.0;					\
	;;					\
};						\
{;						\
	mov r2=r26;				\
	mov b0=r30;				\
	br.sptk.many b0;			\
	;;					\
};						\
END(asm_mov_to_bank0_reg##n##)


#define MOV_FROM_BANK0_REG(n)			\
ENTRY_MIN_ALIGN(asm_mov_from_bank0_reg##n##);	\
{;						\
	mov r26=r2;				\
	nop.b 0x0;				\
	bsw.1;					\
	;;					\
};						\
{;						\
	mov r2=r##n##;				\
	nop.b 0x0;				\
	bsw.0;					\
	;;					\
};						\
{;						\
	mov r19=r2;				\
	mov r2=r26;				\
	mov b0=r30;				\
};						\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many b0;			\
	;;					\
};						\
END(asm_mov_from_bank0_reg##n##)


#define JMP_TO_MOV_TO_BANK0_REG(n)		\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many asm_mov_to_bank0_reg##n##;	\
	;;					\
}


#define JMP_TO_MOV_FROM_BANK0_REG(n)		\
{;						\
	nop.b 0x0;				\
	nop.b 0x0;				\
	br.sptk.many asm_mov_from_bank0_reg##n##; \
	;;					\
}


MOV_FROM_BANK0_REG(16)
MOV_FROM_BANK0_REG(17)
MOV_FROM_BANK0_REG(18)
MOV_FROM_BANK0_REG(19)
MOV_FROM_BANK0_REG(20)
MOV_FROM_BANK0_REG(21)
MOV_FROM_BANK0_REG(22)
MOV_FROM_BANK0_REG(23)
MOV_FROM_BANK0_REG(24)
MOV_FROM_BANK0_REG(25)
MOV_FROM_BANK0_REG(26)
MOV_FROM_BANK0_REG(27)
MOV_FROM_BANK0_REG(28)
MOV_FROM_BANK0_REG(29)
MOV_FROM_BANK0_REG(30)
MOV_FROM_BANK0_REG(31)


// mov from reg table
ENTRY(asm_mov_from_reg)
	MOV_FROM_REG(0)
	MOV_FROM_REG(1)
	MOV_FROM_REG(2)
	MOV_FROM_REG(3)
	MOV_FROM_REG(4)
	MOV_FROM_REG(5)
	MOV_FROM_REG(6)
	MOV_FROM_REG(7)
	MOV_FROM_REG(8)
	MOV_FROM_REG(9)
	MOV_FROM_REG(10)
	MOV_FROM_REG(11)
	MOV_FROM_REG(12)
	MOV_FROM_REG(13)
	MOV_FROM_REG(14)
	MOV_FROM_REG(15)
	JMP_TO_MOV_FROM_BANK0_REG(16)
	JMP_TO_MOV_FROM_BANK0_REG(17)
	JMP_TO_MOV_FROM_BANK0_REG(18)
	JMP_TO_MOV_FROM_BANK0_REG(19)
	JMP_TO_MOV_FROM_BANK0_REG(20)
	JMP_TO_MOV_FROM_BANK0_REG(21)
	JMP_TO_MOV_FROM_BANK0_REG(22)
	JMP_TO_MOV_FROM_BANK0_REG(23)
	JMP_TO_MOV_FROM_BANK0_REG(24)
	JMP_TO_MOV_FROM_BANK0_REG(25)
	JMP_TO_MOV_FROM_BANK0_REG(26)
	JMP_TO_MOV_FROM_BANK0_REG(27)
	JMP_TO_MOV_FROM_BANK0_REG(28)
	JMP_TO_MOV_FROM_BANK0_REG(29)
	JMP_TO_MOV_FROM_BANK0_REG(30)
	JMP_TO_MOV_FROM_BANK0_REG(31)
	MOV_FROM_REG(32)
	MOV_FROM_REG(33)
	MOV_FROM_REG(34)
	MOV_FROM_REG(35)
	MOV_FROM_REG(36)
	MOV_FROM_REG(37)
	MOV_FROM_REG(38)
	MOV_FROM_REG(39)
	MOV_FROM_REG(40)
	MOV_FROM_REG(41)
	MOV_FROM_REG(42)
	MOV_FROM_REG(43)
	MOV_FROM_REG(44)
	MOV_FROM_REG(45)
	MOV_FROM_REG(46)
	MOV_FROM_REG(47)
	MOV_FROM_REG(48)
	MOV_FROM_REG(49)
	MOV_FROM_REG(50)
	MOV_FROM_REG(51)
	MOV_FROM_REG(52)
	MOV_FROM_REG(53)
	MOV_FROM_REG(54)
	MOV_FROM_REG(55)
	MOV_FROM_REG(56)
	MOV_FROM_REG(57)
	MOV_FROM_REG(58)
	MOV_FROM_REG(59)
	MOV_FROM_REG(60)
	MOV_FROM_REG(61)
	MOV_FROM_REG(62)
	MOV_FROM_REG(63)
	MOV_FROM_REG(64)
	MOV_FROM_REG(65)
	MOV_FROM_REG(66)
	MOV_FROM_REG(67)
	MOV_FROM_REG(68)
	MOV_FROM_REG(69)
	MOV_FROM_REG(70)
	MOV_FROM_REG(71)
	MOV_FROM_REG(72)
	MOV_FROM_REG(73)
	MOV_FROM_REG(74)
	MOV_FROM_REG(75)
	MOV_FROM_REG(76)
	MOV_FROM_REG(77)
	MOV_FROM_REG(78)
	MOV_FROM_REG(79)
	MOV_FROM_REG(80)
	MOV_FROM_REG(81)
	MOV_FROM_REG(82)
	MOV_FROM_REG(83)
	MOV_FROM_REG(84)
	MOV_FROM_REG(85)
	MOV_FROM_REG(86)
	MOV_FROM_REG(87)
	MOV_FROM_REG(88)
	MOV_FROM_REG(89)
	MOV_FROM_REG(90)
	MOV_FROM_REG(91)
	MOV_FROM_REG(92)
	MOV_FROM_REG(93)
	MOV_FROM_REG(94)
	MOV_FROM_REG(95)
	MOV_FROM_REG(96)
	MOV_FROM_REG(97)
	MOV_FROM_REG(98)
	MOV_FROM_REG(99)
	MOV_FROM_REG(100)
	MOV_FROM_REG(101)
	MOV_FROM_REG(102)
	MOV_FROM_REG(103)
	MOV_FROM_REG(104)
	MOV_FROM_REG(105)
	MOV_FROM_REG(106)
	MOV_FROM_REG(107)
	MOV_FROM_REG(108)
	MOV_FROM_REG(109)
	MOV_FROM_REG(110)
	MOV_FROM_REG(111)
	MOV_FROM_REG(112)
	MOV_FROM_REG(113)
	MOV_FROM_REG(114)
	MOV_FROM_REG(115)
	MOV_FROM_REG(116)
	MOV_FROM_REG(117)
	MOV_FROM_REG(118)
	MOV_FROM_REG(119)
	MOV_FROM_REG(120)
	MOV_FROM_REG(121)
	MOV_FROM_REG(122)
	MOV_FROM_REG(123)
	MOV_FROM_REG(124)
	MOV_FROM_REG(125)
	MOV_FROM_REG(126)
	MOV_FROM_REG(127)
END(asm_mov_from_reg)


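/*
 * kvm_resume_to_guest: step ipsr.ri/iip past the emulated instruction,
 * then resume through PAL_VPS_RESUME_NORMAL when the guest's vpsr.ic
 * is set, or through PAL_VPS_RESUME_HANDLER (with the first VPD word
 * in r26) when it is clear.
 */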
/* must be in bank 0
 * parameter:
 * r31: pr
 * r24: b0
 */
ENTRY(kvm_resume_to_guest)
	adds r16 = VMM_VCPU_SAVED_GP_OFFSET,r21
	;;
	ld8 r1 =[r16]
	adds r20 = VMM_VCPU_VSA_BASE_OFFSET,r21
	;;
	mov r16=cr.ipsr
	;;
	ld8 r20 = [r20]
	adds r19=VMM_VPD_BASE_OFFSET,r21
	;;
	ld8 r25=[r19]
	extr.u r17=r16,IA64_PSR_RI_BIT,2
	tbit.nz p6,p7=r16,IA64_PSR_RI_BIT+1
	;;
	(p6) mov r18=cr.iip
	(p6) mov r17=r0
	;;
	(p6) add r18=0x10,r18
	(p7) add r17=1,r17
	;;
	(p6) mov cr.iip=r18
	dep r16=r17,r16,IA64_PSR_RI_BIT,2
	;;
	mov cr.ipsr=r16
	adds r19= VPD_VPSR_START_OFFSET,r25
	add r28=PAL_VPS_RESUME_NORMAL,r20
	add r29=PAL_VPS_RESUME_HANDLER,r20
	;;
	ld8 r19=[r19]
	mov b0=r29
	cmp.ne p6,p7 = r0,r0
	;;
	tbit.z p6,p7 = r19,IA64_PSR_IC_BIT	// p7=vpsr.ic
	;;
	(p6) ld8 r26=[r25]
	(p7) mov b0=r28
	mov pr=r31,-2
	br.sptk.many b0		// call pal service
	;;
END(kvm_resume_to_guest)


MOV_TO_BANK0_REG(16)
MOV_TO_BANK0_REG(17)
MOV_TO_BANK0_REG(18)
MOV_TO_BANK0_REG(19)
MOV_TO_BANK0_REG(20)
MOV_TO_BANK0_REG(21)
MOV_TO_BANK0_REG(22)
MOV_TO_BANK0_REG(23)
MOV_TO_BANK0_REG(24)
MOV_TO_BANK0_REG(25)
MOV_TO_BANK0_REG(26)
MOV_TO_BANK0_REG(27)
MOV_TO_BANK0_REG(28)
MOV_TO_BANK0_REG(29)
MOV_TO_BANK0_REG(30)
MOV_TO_BANK0_REG(31)


// mov to reg table
ENTRY(asm_mov_to_reg)
	MOV_TO_REG0
	MOV_TO_REG(1)
	MOV_TO_REG(2)
	MOV_TO_REG(3)
	MOV_TO_REG(4)
	MOV_TO_REG(5)
	MOV_TO_REG(6)
	MOV_TO_REG(7)
	MOV_TO_REG(8)
	MOV_TO_REG(9)
	MOV_TO_REG(10)
	MOV_TO_REG(11)
	MOV_TO_REG(12)
	MOV_TO_REG(13)
	MOV_TO_REG(14)
	MOV_TO_REG(15)
	JMP_TO_MOV_TO_BANK0_REG(16)
	JMP_TO_MOV_TO_BANK0_REG(17)
	JMP_TO_MOV_TO_BANK0_REG(18)
	JMP_TO_MOV_TO_BANK0_REG(19)
	JMP_TO_MOV_TO_BANK0_REG(20)
	JMP_TO_MOV_TO_BANK0_REG(21)
	JMP_TO_MOV_TO_BANK0_REG(22)
	JMP_TO_MOV_TO_BANK0_REG(23)
	JMP_TO_MOV_TO_BANK0_REG(24)
	JMP_TO_MOV_TO_BANK0_REG(25)
	JMP_TO_MOV_TO_BANK0_REG(26)
	JMP_TO_MOV_TO_BANK0_REG(27)
	JMP_TO_MOV_TO_BANK0_REG(28)
	JMP_TO_MOV_TO_BANK0_REG(29)
	JMP_TO_MOV_TO_BANK0_REG(30)
	JMP_TO_MOV_TO_BANK0_REG(31)
	MOV_TO_REG(32)
	MOV_TO_REG(33)
	MOV_TO_REG(34)
	MOV_TO_REG(35)
	MOV_TO_REG(36)
	MOV_TO_REG(37)
	MOV_TO_REG(38)
	MOV_TO_REG(39)
	MOV_TO_REG(40)
	MOV_TO_REG(41)
	MOV_TO_REG(42)
	MOV_TO_REG(43)
	MOV_TO_REG(44)
	MOV_TO_REG(45)
	MOV_TO_REG(46)
	MOV_TO_REG(47)
	MOV_TO_REG(48)
	MOV_TO_REG(49)
	MOV_TO_REG(50)
	MOV_TO_REG(51)
	MOV_TO_REG(52)
	MOV_TO_REG(53)
	MOV_TO_REG(54)
	MOV_TO_REG(55)
	MOV_TO_REG(56)
	MOV_TO_REG(57)
	MOV_TO_REG(58)
	MOV_TO_REG(59)
	MOV_TO_REG(60)
	MOV_TO_REG(61)
	MOV_TO_REG(62)
	MOV_TO_REG(63)
	MOV_TO_REG(64)
	MOV_TO_REG(65)
	MOV_TO_REG(66)
	MOV_TO_REG(67)
	MOV_TO_REG(68)
	MOV_TO_REG(69)
	MOV_TO_REG(70)
	MOV_TO_REG(71)
	MOV_TO_REG(72)
	MOV_TO_REG(73)
	MOV_TO_REG(74)
	MOV_TO_REG(75)
	MOV_TO_REG(76)
	MOV_TO_REG(77)
	MOV_TO_REG(78)
	MOV_TO_REG(79)
	MOV_TO_REG(80)
	MOV_TO_REG(81)
	MOV_TO_REG(82)
	MOV_TO_REG(83)
	MOV_TO_REG(84)
	MOV_TO_REG(85)
	MOV_TO_REG(86)
	MOV_TO_REG(87)
	MOV_TO_REG(88)
	MOV_TO_REG(89)
	MOV_TO_REG(90)
	MOV_TO_REG(91)
	MOV_TO_REG(92)
	MOV_TO_REG(93)
	MOV_TO_REG(94)
	MOV_TO_REG(95)
	MOV_TO_REG(96)
	MOV_TO_REG(97)
	MOV_TO_REG(98)
	MOV_TO_REG(99)
	MOV_TO_REG(100)
	MOV_TO_REG(101)
	MOV_TO_REG(102)
	MOV_TO_REG(103)
	MOV_TO_REG(104)
	MOV_TO_REG(105)
	MOV_TO_REG(106)
	MOV_TO_REG(107)
	MOV_TO_REG(108)
	MOV_TO_REG(109)
	MOV_TO_REG(110)
	MOV_TO_REG(111)
	MOV_TO_REG(112)
	MOV_TO_REG(113)
	MOV_TO_REG(114)
	MOV_TO_REG(115)
	MOV_TO_REG(116)
	MOV_TO_REG(117)
	MOV_TO_REG(118)
	MOV_TO_REG(119)
	MOV_TO_REG(120)
	MOV_TO_REG(121)
	MOV_TO_REG(122)
	MOV_TO_REG(123)
	MOV_TO_REG(124)
	MOV_TO_REG(125)
	MOV_TO_REG(126)
	MOV_TO_REG(127)
END(asm_mov_to_reg)