Lines Matching refs:regs
76 #define AL(regs) (((unsigned char *)&((regs)->pt.ax))[0]) argument
77 #define AH(regs) (((unsigned char *)&((regs)->pt.ax))[1]) argument
78 #define IP(regs) (*(unsigned short *)&((regs)->pt.ip)) argument
79 #define SP(regs) (*(unsigned short *)&((regs)->pt.sp)) argument
93 void save_v86_state(struct kernel_vm86_regs *regs, int retval) in save_v86_state() argument
112 set_flags(regs->pt.flags, VEFLAGS, X86_EFLAGS_VIF | vm86->veflags_mask); in save_v86_state()
123 put_user_ex(regs->pt.bx, &user->regs.ebx); in save_v86_state()
124 put_user_ex(regs->pt.cx, &user->regs.ecx); in save_v86_state()
125 put_user_ex(regs->pt.dx, &user->regs.edx); in save_v86_state()
126 put_user_ex(regs->pt.si, &user->regs.esi); in save_v86_state()
127 put_user_ex(regs->pt.di, &user->regs.edi); in save_v86_state()
128 put_user_ex(regs->pt.bp, &user->regs.ebp); in save_v86_state()
129 put_user_ex(regs->pt.ax, &user->regs.eax); in save_v86_state()
130 put_user_ex(regs->pt.ip, &user->regs.eip); in save_v86_state()
131 put_user_ex(regs->pt.cs, &user->regs.cs); in save_v86_state()
132 put_user_ex(regs->pt.flags, &user->regs.eflags); in save_v86_state()
133 put_user_ex(regs->pt.sp, &user->regs.esp); in save_v86_state()
134 put_user_ex(regs->pt.ss, &user->regs.ss); in save_v86_state()
135 put_user_ex(regs->es, &user->regs.es); in save_v86_state()
136 put_user_ex(regs->ds, &user->regs.ds); in save_v86_state()
137 put_user_ex(regs->fs, &user->regs.fs); in save_v86_state()
138 put_user_ex(regs->gs, &user->regs.gs); in save_v86_state()
154 memcpy(&regs->pt, &vm86->regs32, sizeof(struct pt_regs)); in save_v86_state()
158 regs->pt.ax = retval; in save_v86_state()
233 struct pt_regs *regs = current_pt_regs(); in do_sys_vm86() local
278 get_user_ex(vm86regs.pt.bx, &user_vm86->regs.ebx); in do_sys_vm86()
279 get_user_ex(vm86regs.pt.cx, &user_vm86->regs.ecx); in do_sys_vm86()
280 get_user_ex(vm86regs.pt.dx, &user_vm86->regs.edx); in do_sys_vm86()
281 get_user_ex(vm86regs.pt.si, &user_vm86->regs.esi); in do_sys_vm86()
282 get_user_ex(vm86regs.pt.di, &user_vm86->regs.edi); in do_sys_vm86()
283 get_user_ex(vm86regs.pt.bp, &user_vm86->regs.ebp); in do_sys_vm86()
284 get_user_ex(vm86regs.pt.ax, &user_vm86->regs.eax); in do_sys_vm86()
285 get_user_ex(vm86regs.pt.ip, &user_vm86->regs.eip); in do_sys_vm86()
286 get_user_ex(seg, &user_vm86->regs.cs); in do_sys_vm86()
288 get_user_ex(vm86regs.pt.flags, &user_vm86->regs.eflags); in do_sys_vm86()
289 get_user_ex(vm86regs.pt.sp, &user_vm86->regs.esp); in do_sys_vm86()
290 get_user_ex(seg, &user_vm86->regs.ss); in do_sys_vm86()
292 get_user_ex(vm86regs.es, &user_vm86->regs.es); in do_sys_vm86()
293 get_user_ex(vm86regs.ds, &user_vm86->regs.ds); in do_sys_vm86()
294 get_user_ex(vm86regs.fs, &user_vm86->regs.fs); in do_sys_vm86()
295 get_user_ex(vm86regs.gs, &user_vm86->regs.gs); in do_sys_vm86()
321 memcpy(&vm86->regs32, regs, sizeof(struct pt_regs)); in do_sys_vm86()
331 vm86regs.pt.flags |= regs->flags & ~SAFE_MASK; in do_sys_vm86()
334 vm86regs.pt.orig_ax = regs->orig_ax; in do_sys_vm86()
368 memcpy((struct kernel_vm86_regs *)regs, &vm86regs, sizeof(vm86regs)); in do_sys_vm86()
370 return regs->ax; in do_sys_vm86()
373 static inline void set_IF(struct kernel_vm86_regs *regs) in set_IF() argument
378 static inline void clear_IF(struct kernel_vm86_regs *regs) in clear_IF() argument
383 static inline void clear_TF(struct kernel_vm86_regs *regs) in clear_TF() argument
385 regs->pt.flags &= ~X86_EFLAGS_TF; in clear_TF()
388 static inline void clear_AC(struct kernel_vm86_regs *regs) in clear_AC() argument
390 regs->pt.flags &= ~X86_EFLAGS_AC; in clear_AC()
405 static inline void set_vflags_long(unsigned long flags, struct kernel_vm86_regs *regs) in set_vflags_long() argument
408 set_flags(regs->pt.flags, flags, SAFE_MASK); in set_vflags_long()
410 set_IF(regs); in set_vflags_long()
412 clear_IF(regs); in set_vflags_long()
415 static inline void set_vflags_short(unsigned short flags, struct kernel_vm86_regs *regs) in set_vflags_short() argument
418 set_flags(regs->pt.flags, flags, SAFE_MASK); in set_vflags_short()
420 set_IF(regs); in set_vflags_short()
422 clear_IF(regs); in set_vflags_short()
425 static inline unsigned long get_vflags(struct kernel_vm86_regs *regs) in get_vflags() argument
427 unsigned long flags = regs->pt.flags & RETURN_MASK; in get_vflags()
525 static void do_int(struct kernel_vm86_regs *regs, int i, in do_int() argument
532 if (regs->pt.cs == BIOSSEG) in do_int()
536 if (i == 0x21 && is_revectored(AH(regs), &vm86->int21_revectored)) in do_int()
543 pushw(ssp, sp, get_vflags(regs), cannot_handle); in do_int()
544 pushw(ssp, sp, regs->pt.cs, cannot_handle); in do_int()
545 pushw(ssp, sp, IP(regs), cannot_handle); in do_int()
546 regs->pt.cs = segoffs >> 16; in do_int()
547 SP(regs) -= 6; in do_int()
548 IP(regs) = segoffs & 0xffff; in do_int()
549 clear_TF(regs); in do_int()
550 clear_IF(regs); in do_int()
551 clear_AC(regs); in do_int()
555 save_v86_state(regs, VM86_INTx + (i << 8)); in do_int()
558 int handle_vm86_trap(struct kernel_vm86_regs *regs, long error_code, int trapno) in handle_vm86_trap() argument
564 save_v86_state(regs, VM86_TRAP + (trapno << 8)); in handle_vm86_trap()
567 do_int(regs, trapno, (unsigned char __user *) (regs->pt.ss << 4), SP(regs)); in handle_vm86_trap()
578 void handle_vm86_fault(struct kernel_vm86_regs *regs, long error_code) in handle_vm86_fault() argument
591 orig_flags = *(unsigned short *)&regs->pt.flags; in handle_vm86_fault()
593 csp = (unsigned char __user *) (regs->pt.cs << 4); in handle_vm86_fault()
594 ssp = (unsigned char __user *) (regs->pt.ss << 4); in handle_vm86_fault()
595 sp = SP(regs); in handle_vm86_fault()
596 ip = IP(regs); in handle_vm86_fault()
621 pushl(ssp, sp, get_vflags(regs), simulate_sigsegv); in handle_vm86_fault()
622 SP(regs) -= 4; in handle_vm86_fault()
624 pushw(ssp, sp, get_vflags(regs), simulate_sigsegv); in handle_vm86_fault()
625 SP(regs) -= 2; in handle_vm86_fault()
627 IP(regs) = ip; in handle_vm86_fault()
636 SP(regs) += 4; in handle_vm86_fault()
639 SP(regs) += 2; in handle_vm86_fault()
641 IP(regs) = ip; in handle_vm86_fault()
644 set_vflags_long(newflags, regs); in handle_vm86_fault()
646 set_vflags_short(newflags, regs); in handle_vm86_fault()
654 IP(regs) = ip; in handle_vm86_fault()
657 save_v86_state(regs, VM86_INTx + (intno << 8)); in handle_vm86_fault()
661 do_int(regs, intno, ssp, sp); in handle_vm86_fault()
675 SP(regs) += 12; in handle_vm86_fault()
680 SP(regs) += 6; in handle_vm86_fault()
682 IP(regs) = newip; in handle_vm86_fault()
683 regs->pt.cs = newcs; in handle_vm86_fault()
686 set_vflags_long(newflags, regs); in handle_vm86_fault()
688 set_vflags_short(newflags, regs); in handle_vm86_fault()
695 IP(regs) = ip; in handle_vm86_fault()
696 clear_IF(regs); in handle_vm86_fault()
707 IP(regs) = ip; in handle_vm86_fault()
708 set_IF(regs); in handle_vm86_fault()
712 save_v86_state(regs, VM86_UNKNOWN); in handle_vm86_fault()
719 save_v86_state(regs, VM86_STI); in handle_vm86_fault()
725 save_v86_state(regs, VM86_PICRETURN); in handle_vm86_fault()
729 handle_vm86_trap(regs, 0, X86_TRAP_DB); in handle_vm86_fault()
743 save_v86_state(regs, VM86_UNKNOWN); in handle_vm86_fault()