/linux-4.1.27/arch/x86/lib/ |
H A D | msr-reg.S | 21 movl (%rdi), %eax 22 movl 4(%rdi), %ecx 23 movl 8(%rdi), %edx 24 movl 12(%rdi), %ebx 25 movl 20(%rdi), %ebp 26 movl 24(%rdi), %esi 27 movl 28(%rdi), %edi 30 2: movl %eax, (%r10) 31 movl %r11d, %eax /* Return value */ 32 movl %ecx, 4(%r10) 33 movl %edx, 8(%r10) 34 movl %ebx, 12(%r10) 35 movl %ebp, 20(%r10) 36 movl %esi, 24(%r10) 37 movl %edi, 28(%r10) 43 movl $-EIO, %r11d 62 movl 4(%eax), %ecx 63 movl 8(%eax), %edx 64 movl 12(%eax), %ebx 65 movl 20(%eax), %ebp 66 movl 24(%eax), %esi 67 movl 28(%eax), %edi 68 movl (%eax), %eax 72 movl 4(%esp), %eax 76 movl %ecx, 4(%eax) 77 movl %edx, 8(%eax) 78 movl %ebx, 12(%eax) 79 movl %ebp, 20(%eax) 80 movl %esi, 24(%eax) 81 movl %edi, 28(%eax) 90 movl $-EIO, 4(%esp)
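
The loads and stores above shuttle an eight-slot array of 32-bit values in and out of the general registers around the MSR access (offset 0 = eax, 4 = ecx, 8 = edx, 12 = ebx, 20 = ebp, 24 = esi, 28 = edi; slot 16, the esp position, is left untouched), and the fixup path plants -EIO as the return value when the access faults. A hedged C view of that register block, with invented names, is sketched below; it is an illustration of the layout, not kernel API:

/* Illustrative layout for the regs block used by msr-reg.S;
 * slot indices follow the byte offsets above divided by four. */
enum msr_reg_slot {
	MSR_SLOT_EAX,	/* offset 0  */
	MSR_SLOT_ECX,	/* offset 4  */
	MSR_SLOT_EDX,	/* offset 8  */
	MSR_SLOT_EBX,	/* offset 12 */
	MSR_SLOT_SP,	/* offset 16, skipped by the asm */
	MSR_SLOT_EBP,	/* offset 20 */
	MSR_SLOT_ESI,	/* offset 24 */
	MSR_SLOT_EDI,	/* offset 28 */
	MSR_SLOT_COUNT	/* 8 slots, 32 bytes */
};
/* On a faulting rdmsr/wrmsr the exception fixup makes the routine
 * return -EIO (the "movl $-EIO" lines above). */
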
|
H A D | atomic64_386_32.S | 48 movl (v), %eax 49 movl 4(v), %edx 55 movl %ebx, (v) 56 movl %ecx, 4(v) 62 movl (v), %eax 63 movl 4(v), %edx 64 movl %ebx, (v) 65 movl %ecx, 4(v) 80 movl %eax, (v) 81 movl %edx, 4(v) 99 movl %eax, (v) 100 movl %edx, 4(v) 113 movl (v), %eax 114 movl 4(v), %edx 117 movl %eax, (v) 118 movl %edx, 4(v) 131 movl (v), %eax 132 movl 4(v), %edx 135 movl %eax, (v) 136 movl %edx, 4(v) 149 movl %eax, (v) 150 movl %edx, 4(v) 151 movl $1, %eax 164 movl (v), %eax 165 movl 4(v), %edx 171 movl %eax, (v) 172 movl %edx, 4(v) 173 movl $1, %eax 185 movl (v), %eax 186 movl 4(v), %edx 190 movl %eax, (v) 191 movl %edx, 4(v)
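
Every routine above manipulates a 64-bit counter as two 32-bit halves: the low word lives at (v) and travels in %eax/%ebx, the high word at 4(v) in %edx/%ecx. A small C illustration of that split follows; it is a sketch of the lo/hi layout only (plain C, no atomicity implied, and split_u64 is an invented helper):

#include <stdint.h>
#include <stdio.h>

/* Shows the layout the asm above relies on:
 * (v) = low 32 bits, 4(v) = high 32 bits on little-endian x86. */
static void split_u64(uint64_t val, uint32_t *lo, uint32_t *hi)
{
	*lo = (uint32_t)(val & 0xffffffffu);	/* goes to (v), carried in %eax */
	*hi = (uint32_t)(val >> 32);		/* goes to 4(v), carried in %edx */
}

int main(void)
{
	uint32_t lo, hi;

	split_u64(0x1122334455667788ULL, &lo, &hi);
	printf("lo=%#x hi=%#x\n", lo, hi);	/* lo=0x55667788 hi=0x11223344 */
	return 0;
}
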
|
H A D | usercopy_32.c | 47 " movl %2,%0\n" \ 106 "1: movl 32(%4), %%eax\n" __copy_user_intel() 109 "2: movl 64(%4), %%eax\n" __copy_user_intel() 111 "3: movl 0(%4), %%eax\n" __copy_user_intel() 112 "4: movl 4(%4), %%edx\n" __copy_user_intel() 113 "5: movl %%eax, 0(%3)\n" __copy_user_intel() 114 "6: movl %%edx, 4(%3)\n" __copy_user_intel() 115 "7: movl 8(%4), %%eax\n" __copy_user_intel() 116 "8: movl 12(%4),%%edx\n" __copy_user_intel() 117 "9: movl %%eax, 8(%3)\n" __copy_user_intel() 118 "10: movl %%edx, 12(%3)\n" __copy_user_intel() 119 "11: movl 16(%4), %%eax\n" __copy_user_intel() 120 "12: movl 20(%4), %%edx\n" __copy_user_intel() 121 "13: movl %%eax, 16(%3)\n" __copy_user_intel() 122 "14: movl %%edx, 20(%3)\n" __copy_user_intel() 123 "15: movl 24(%4), %%eax\n" __copy_user_intel() 124 "16: movl 28(%4), %%edx\n" __copy_user_intel() 125 "17: movl %%eax, 24(%3)\n" __copy_user_intel() 126 "18: movl %%edx, 28(%3)\n" __copy_user_intel() 127 "19: movl 32(%4), %%eax\n" __copy_user_intel() 128 "20: movl 36(%4), %%edx\n" __copy_user_intel() 129 "21: movl %%eax, 32(%3)\n" __copy_user_intel() 130 "22: movl %%edx, 36(%3)\n" __copy_user_intel() 131 "23: movl 40(%4), %%eax\n" __copy_user_intel() 132 "24: movl 44(%4), %%edx\n" __copy_user_intel() 133 "25: movl %%eax, 40(%3)\n" __copy_user_intel() 134 "26: movl %%edx, 44(%3)\n" __copy_user_intel() 135 "27: movl 48(%4), %%eax\n" __copy_user_intel() 136 "28: movl 52(%4), %%edx\n" __copy_user_intel() 137 "29: movl %%eax, 48(%3)\n" __copy_user_intel() 138 "30: movl %%edx, 52(%3)\n" __copy_user_intel() 139 "31: movl 56(%4), %%eax\n" __copy_user_intel() 140 "32: movl 60(%4), %%edx\n" __copy_user_intel() 141 "33: movl %%eax, 56(%3)\n" __copy_user_intel() 142 "34: movl %%edx, 60(%3)\n" __copy_user_intel() 148 "35: movl %0, %%eax\n" __copy_user_intel() 153 "36: movl %%eax, %0\n" __copy_user_intel() 210 "0: movl 32(%4), %%eax\n" __copy_user_zeroing_intel() 213 "1: movl 64(%4), %%eax\n" __copy_user_zeroing_intel() 215 "2: movl 0(%4), %%eax\n" __copy_user_zeroing_intel() 216 "21: movl 4(%4), %%edx\n" __copy_user_zeroing_intel() 217 " movl %%eax, 0(%3)\n" __copy_user_zeroing_intel() 218 " movl %%edx, 4(%3)\n" __copy_user_zeroing_intel() 219 "3: movl 8(%4), %%eax\n" __copy_user_zeroing_intel() 220 "31: movl 12(%4),%%edx\n" __copy_user_zeroing_intel() 221 " movl %%eax, 8(%3)\n" __copy_user_zeroing_intel() 222 " movl %%edx, 12(%3)\n" __copy_user_zeroing_intel() 223 "4: movl 16(%4), %%eax\n" __copy_user_zeroing_intel() 224 "41: movl 20(%4), %%edx\n" __copy_user_zeroing_intel() 225 " movl %%eax, 16(%3)\n" __copy_user_zeroing_intel() 226 " movl %%edx, 20(%3)\n" __copy_user_zeroing_intel() 227 "10: movl 24(%4), %%eax\n" __copy_user_zeroing_intel() 228 "51: movl 28(%4), %%edx\n" __copy_user_zeroing_intel() 229 " movl %%eax, 24(%3)\n" __copy_user_zeroing_intel() 230 " movl %%edx, 28(%3)\n" __copy_user_zeroing_intel() 231 "11: movl 32(%4), %%eax\n" __copy_user_zeroing_intel() 232 "61: movl 36(%4), %%edx\n" __copy_user_zeroing_intel() 233 " movl %%eax, 32(%3)\n" __copy_user_zeroing_intel() 234 " movl %%edx, 36(%3)\n" __copy_user_zeroing_intel() 235 "12: movl 40(%4), %%eax\n" __copy_user_zeroing_intel() 236 "71: movl 44(%4), %%edx\n" __copy_user_zeroing_intel() 237 " movl %%eax, 40(%3)\n" __copy_user_zeroing_intel() 238 " movl %%edx, 44(%3)\n" __copy_user_zeroing_intel() 239 "13: movl 48(%4), %%eax\n" __copy_user_zeroing_intel() 240 "81: movl 52(%4), %%edx\n" __copy_user_zeroing_intel() 241 " movl %%eax, 48(%3)\n" __copy_user_zeroing_intel() 242 " movl %%edx, 52(%3)\n" 
__copy_user_zeroing_intel() 243 "14: movl 56(%4), %%eax\n" __copy_user_zeroing_intel() 244 "91: movl 60(%4), %%edx\n" __copy_user_zeroing_intel() 245 " movl %%eax, 56(%3)\n" __copy_user_zeroing_intel() 246 " movl %%edx, 60(%3)\n" __copy_user_zeroing_intel() 252 "5: movl %0, %%eax\n" __copy_user_zeroing_intel() 257 " movl %%eax,%0\n" __copy_user_zeroing_intel() 308 "0: movl 32(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 311 "1: movl 64(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 313 "2: movl 0(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 314 "21: movl 4(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 317 "3: movl 8(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 318 "31: movl 12(%4),%%edx\n" __copy_user_zeroing_intel_nocache() 321 "4: movl 16(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 322 "41: movl 20(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 325 "10: movl 24(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 326 "51: movl 28(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 329 "11: movl 32(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 330 "61: movl 36(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 333 "12: movl 40(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 334 "71: movl 44(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 337 "13: movl 48(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 338 "81: movl 52(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 341 "14: movl 56(%4), %%eax\n" __copy_user_zeroing_intel_nocache() 342 "91: movl 60(%4), %%edx\n" __copy_user_zeroing_intel_nocache() 351 "5: movl %0, %%eax\n" __copy_user_zeroing_intel_nocache() 356 " movl %%eax,%0\n" __copy_user_zeroing_intel_nocache() 402 "0: movl 32(%4), %%eax\n" __copy_user_intel_nocache() 405 "1: movl 64(%4), %%eax\n" __copy_user_intel_nocache() 407 "2: movl 0(%4), %%eax\n" __copy_user_intel_nocache() 408 "21: movl 4(%4), %%edx\n" __copy_user_intel_nocache() 411 "3: movl 8(%4), %%eax\n" __copy_user_intel_nocache() 412 "31: movl 12(%4),%%edx\n" __copy_user_intel_nocache() 415 "4: movl 16(%4), %%eax\n" __copy_user_intel_nocache() 416 "41: movl 20(%4), %%edx\n" __copy_user_intel_nocache() 419 "10: movl 24(%4), %%eax\n" __copy_user_intel_nocache() 420 "51: movl 28(%4), %%edx\n" __copy_user_intel_nocache() 423 "11: movl 32(%4), %%eax\n" __copy_user_intel_nocache() 424 "61: movl 36(%4), %%edx\n" __copy_user_intel_nocache() 427 "12: movl 40(%4), %%eax\n" __copy_user_intel_nocache() 428 "71: movl 44(%4), %%edx\n" __copy_user_intel_nocache() 431 "13: movl 48(%4), %%eax\n" __copy_user_intel_nocache() 432 "81: movl 52(%4), %%edx\n" __copy_user_intel_nocache() 435 "14: movl 56(%4), %%eax\n" __copy_user_intel_nocache() 436 "91: movl 60(%4), %%edx\n" __copy_user_intel_nocache() 445 "5: movl %0, %%eax\n" __copy_user_intel_nocache() 450 " movl %%eax,%0\n" __copy_user_intel_nocache() 504 " movl %1,%0\n" \ 509 " movl %3,%0\n" \ 514 " movl %3,%0\n" \ 537 " movl %1,%0\n" \ 542 " movl %3,%0\n" \ 547 " movl %3,%0\n" \
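
The numbered labels above belong to copy loops unrolled to 64 bytes per pass (movl pairs at offsets 0 through 60), with each label wired into the exception table so a faulting user access drops out of the loop. A rough C sketch of the same unrolling is given below; it omits the fault handling and the rep-based tail, and copy_unrolled64 is an invented name:

#include <stdint.h>
#include <string.h>

/* Sketch only: mirrors the 64-bytes-per-iteration structure of
 * __copy_user_intel(); the kernel versions add exception fixups
 * and finish the tail with rep movsl/movsb. */
static void copy_unrolled64(void *dst, const void *src, size_t len)
{
	uint32_t *d = dst;
	const uint32_t *s = src;

	while (len >= 64) {
		/* sixteen 4-byte words, written out as the asm does */
		d[0] = s[0];   d[1] = s[1];   d[2] = s[2];   d[3] = s[3];
		d[4] = s[4];   d[5] = s[5];   d[6] = s[6];   d[7] = s[7];
		d[8] = s[8];   d[9] = s[9];   d[10] = s[10]; d[11] = s[11];
		d[12] = s[12]; d[13] = s[13]; d[14] = s[14]; d[15] = s[15];
		d += 16;
		s += 16;
		len -= 64;
	}
	memcpy(d, s, len);	/* remaining tail */
}
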
|
H A D | atomic64_cx8_32.S | 17 movl %ebx, %eax 18 movl %ecx, %edx 65 movl %eax, %esi 66 movl %edx, %edi 67 movl %ecx, %ebp 71 movl %eax, %ebx 72 movl %edx, %ecx 80 movl %ebx, %eax 81 movl %ecx, %edx 101 movl %eax, %ebx 102 movl %edx, %ecx 110 movl %ebx, %eax 111 movl %ecx, %edx 127 movl %eax, %ebx 128 movl %edx, %ecx 137 movl %ebx, %eax 138 movl %ecx, %edx 152 movl %eax, %ebp 153 movl %edx, %edi 160 movl %eax, %ebx 161 movl %edx, %ecx 168 movl $1, %eax 189 movl %eax, %ecx 192 movl %eax, %ebx 200 movl $1, %eax
|
H A D | strstr_32.c | 8 "movl %6,%%edi\n\t" strstr() 13 "movl %%ecx,%%edx\n" strstr() 15 "movl %%esi,%%eax\n\t" strstr() 16 "movl %%edx,%%ecx\n\t" strstr()
|
H A D | checksum_32.S | 56 movl 20(%esp),%eax # Function arg: unsigned int sum 57 movl 16(%esp),%ecx # Function arg: int len 58 movl 12(%esp),%esi # Function arg: unsigned char *buff 83 movl %ecx, %edx 87 1: movl (%esi), %ebx 89 movl 4(%esi), %ebx 91 movl 8(%esi), %ebx 93 movl 12(%esi), %ebx 95 movl 16(%esi), %ebx 97 movl 20(%esi), %ebx 99 movl 24(%esi), %ebx 101 movl 28(%esi), %ebx 107 2: movl %edx, %ecx 146 movl 20(%esp),%eax # Function arg: unsigned int sum 147 movl 16(%esp),%ecx # Function arg: int len 148 movl 12(%esp),%esi # Function arg: const unsigned char *buf 153 movl %ecx, %edx 154 movl %ecx, %ebx 235 movl %edx, %ecx 241 movl $0xffffff,%ebx # by the shll and shrl instructions 296 movl ARGBASE+16(%esp),%eax # sum 297 movl ARGBASE+12(%esp),%ecx # len 298 movl ARGBASE+4(%esp),%esi # src 299 movl ARGBASE+8(%esp),%edi # dst 314 movl %ecx, FP(%esp) 318 SRC(1: movl (%esi), %ebx ) 319 SRC( movl 4(%esi), %edx ) 321 DST( movl %ebx, (%edi) ) 323 DST( movl %edx, 4(%edi) ) 325 SRC( movl 8(%esi), %ebx ) 326 SRC( movl 12(%esi), %edx ) 328 DST( movl %ebx, 8(%edi) ) 330 DST( movl %edx, 12(%edi) ) 332 SRC( movl 16(%esi), %ebx ) 333 SRC( movl 20(%esi), %edx ) 335 DST( movl %ebx, 16(%edi) ) 337 DST( movl %edx, 20(%edi) ) 339 SRC( movl 24(%esi), %ebx ) 340 SRC( movl 28(%esi), %edx ) 342 DST( movl %ebx, 24(%edi) ) 344 DST( movl %edx, 28(%edi) ) 351 2: movl FP(%esp), %edx 352 movl %edx, %ecx 356 SRC(3: movl (%esi), %ebx ) 358 DST( movl %ebx, (%edi) ) 385 movl ARGBASE+20(%esp), %ebx # src_err_ptr 386 movl $-EFAULT, (%ebx) 390 movl ARGBASE+8(%esp), %edi # dst 391 movl ARGBASE+12(%esp), %ecx # len 398 movl ARGBASE+24(%esp), %ebx # dst_err_ptr 399 movl $-EFAULT,(%ebx) 417 SRC(movl x(%esi), %ebx ) ; \ 419 DST(movl %ebx, x(%edi) ) ; 422 SRC(movl x(%esi), %ebx ) ; \ 424 DST(movl %ebx, x(%edi) ) ; 433 movl ARGBASE+4(%esp),%esi #src 434 movl ARGBASE+8(%esp),%edi #dst 435 movl ARGBASE+12(%esp),%ecx #len 436 movl ARGBASE+16(%esp),%eax #sum 437 # movl %ecx, %edx 438 movl %ecx, %ebx 439 movl %esi, %edx 461 4: movl ARGBASE+12(%esp),%edx #len 479 6001: movl ARGBASE+20(%esp), %ebx # src_err_ptr 480 movl $-EFAULT, (%ebx) 482 movl ARGBASE+8(%esp),%edi # dst 483 movl ARGBASE+12(%esp),%ecx # len 487 6002: movl ARGBASE+24(%esp), %ebx # dst_err_ptr 488 movl $-EFAULT, (%ebx)
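
csum_partial above pulls its arguments off the stack (buff, len, sum) and walks the buffer a 32-bit word at a time; the actual add-with-carry instructions do not appear in these hits because the listing only matches movl. The arithmetic being performed is the usual ones'-complement (internet) checksum; a small C rendition for reference, not the kernel's implementation:

#include <stdint.h>
#include <stddef.h>

/* Ones'-complement accumulation over 32-bit words with carries folded
 * back in, roughly what csum_partial() computes before the final fold. */
static uint32_t csum_words(const uint32_t *buf, size_t nwords, uint32_t sum)
{
	for (size_t i = 0; i < nwords; i++) {
		uint64_t t = (uint64_t)sum + buf[i];
		sum = (uint32_t)t + (uint32_t)(t >> 32);  /* add carry back in */
	}
	return sum;
}

/* Fold a 32-bit partial sum down to the final 16-bit checksum. */
static uint16_t csum_fold(uint32_t sum)
{
	sum = (sum & 0xffff) + (sum >> 16);
	sum = (sum & 0xffff) + (sum >> 16);
	return (uint16_t)~sum;
}
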
|
H A D | cmpxchg8b_emu.S | 38 movl %ebx, (%esi) 39 movl %ecx, 4(%esi) 47 movl (%esi), %eax 49 movl 4(%esi), %edx
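
This helper emulates the cmpxchg8b instruction for CPUs that lack it: compare the 64-bit value at (%esi)/4(%esi) against %eax:%edx and, on a match, store %ebx:%ecx (lines 38/39), otherwise reload the current value into %eax:%edx (lines 47/49); the comparison itself does not show up in these movl-only hits. The semantics expressed as a C sketch (not atomic as written here; the kernel routine also guards the sequence against interrupts):

#include <stdint.h>
#include <stdbool.h>

/* Semantics of the emulated cmpxchg8b; illustrative only. */
static bool cmpxchg64_emulated(uint64_t *ptr, uint64_t *expected, uint64_t desired)
{
	if (*ptr == *expected) {
		*ptr = desired;		/* movl %ebx,(%esi) / movl %ecx,4(%esi) */
		return true;
	}
	*expected = *ptr;		/* movl (%esi),%eax / movl 4(%esi),%edx */
	return false;
}
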
|
H A D | clear_page_64.S | 23 movl $4096/8,%ecx 34 movl $4096/64,%ecx 56 movl $4096,%ecx
|
H A D | copy_user_64.S | 21 movl %edi,%ecx 82 movl %edx,%ecx 87 movl %edx,%eax 112 movl %edx,%ecx 136 17: movl %edx,%ecx 148 movl %edx,%ecx 165 50: movl %ecx,%edx 216 movl %edx,%ecx 221 2: movl %edx,%ecx 230 12: movl %ecx,%edx /* ecx is zerorest also */ 254 movl %edx,%ecx 262 12: movl %ecx,%edx /* ecx is zerorest also */
|
H A D | copy_user_nocache_64.S | 23 movl %edi,%ecx 56 movl %edx,%ecx 80 17: movl %edx,%ecx 92 movl %edx,%ecx 110 50: movl %ecx,%edx
|
H A D | putuser.S | 69 3: movl %eax,(%_ASM_CX) 83 5: movl %edx,4(%_ASM_CX) 91 movl $-EFAULT,%eax
|
H A D | getuser.S | 74 3: movl -3(%_ASM_AX),%edx 101 4: movl -7(%_ASM_AX),%edx 102 5: movl -3(%_ASM_AX),%ecx
|
H A D | string_32.c | 76 "movl %8,%3\n" strncat() 152 "movl $1,%1\n" strchr() 188 "movl $1,%0\n" memchr() 220 asm volatile("movl %2,%0\n\t" strnlen()
|
/linux-4.1.27/arch/x86/power/ |
H A D | hibernate_asm_32.S | 18 movl %esp, saved_context_esp 19 movl %ebx, saved_context_ebx 20 movl %ebp, saved_context_ebp 21 movl %esi, saved_context_esi 22 movl %edi, saved_context_edi 30 movl mmu_cr4_features, %ecx 31 movl resume_pg_dir, %eax 33 movl %eax, %cr3 37 movl %ecx, %cr4; # turn off PGE 38 movl %cr3, %eax; # flush TLB 39 movl %eax, %cr3 41 movl restore_pblist, %edx 48 movl pbe_address(%edx), %esi 49 movl pbe_orig_address(%edx), %edi 51 movl $1024, %ecx 55 movl pbe_next(%edx), %edx 61 movl $swapper_pg_dir, %eax 63 movl %eax, %cr3 64 movl mmu_cr4_features, %ecx 66 movl %ecx, %cr4; # turn PGE back on 69 movl saved_context_esp, %esp 70 movl saved_context_ebp, %ebp 71 movl saved_context_ebx, %ebx 72 movl saved_context_esi, %esi 73 movl saved_context_edi, %edi 79 movl $saved_context, %eax
|
/linux-4.1.27/arch/x86/um/ |
H A D | setjmp_32.S | 23 movl %eax,%edx 25 movl 4(%esp),%edx 29 movl %ebx,(%edx) 30 movl %esp,4(%edx) # Post-return %esp! 32 movl %ebp,8(%edx) 33 movl %esi,12(%edx) 34 movl %edi,16(%edx) 35 movl %ecx,20(%edx) # Return address 48 movl 4(%esp),%edx # jmp_ptr address 49 movl 8(%esp),%eax # Return value 51 movl (%edx),%ebx 52 movl 4(%edx),%esp 53 movl 8(%edx),%ebp 54 movl 12(%edx),%esi 55 movl 16(%edx),%edi
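
The offsets spell out the jump-buffer layout used here: %ebx at 0, the post-return %esp at 4, %ebp at 8, %esi at 12, %edi at 16, and the saved return address at 20, with the longjmp half reloading them in the same order and putting the caller-supplied value in %eax. A hedged C view of that buffer (jmp_buf_32 is an invented name, not the type the UML code actually uses):

#include <stdint.h>

/* Layout implied by the 4-byte offsets in setjmp_32.S; illustrative only. */
struct jmp_buf_32 {
	uint32_t ebx;	/* offset 0  */
	uint32_t esp;	/* offset 4,  stack pointer after setjmp returns */
	uint32_t ebp;	/* offset 8  */
	uint32_t esi;	/* offset 12 */
	uint32_t edi;	/* offset 16 */
	uint32_t eip;	/* offset 20, saved return address */
};
_Static_assert(sizeof(struct jmp_buf_32) == 24, "six 32-bit slots");
/* longjmp(buf, val): restore the six slots, then jump to eip with
 * %eax = val (the "Return value" load from 8(%esp) above). */
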
|
H A D | checksum_32.S | 55 movl 20(%esp),%eax # Function arg: unsigned int sum 56 movl 16(%esp),%ecx # Function arg: int len 57 movl 12(%esp),%esi # Function arg: unsigned char *buff 69 movl %ecx, %edx 73 1: movl (%esi), %ebx 75 movl 4(%esi), %ebx 77 movl 8(%esi), %ebx 79 movl 12(%esi), %ebx 81 movl 16(%esi), %ebx 83 movl 20(%esi), %ebx 85 movl 24(%esi), %ebx 87 movl 28(%esi), %ebx 93 2: movl %edx, %ecx 125 movl 20(%esp),%eax # Function arg: unsigned int sum 126 movl 16(%esp),%ecx # Function arg: int len 127 movl 12(%esp),%esi # Function arg: const unsigned char *buf 132 movl %ecx, %edx 133 movl %ecx, %ebx 199 movl %edx, %ecx 205 movl $0xffffff,%ebx # by the shll and shrl instructions
|
H A D | setjmp_64.S | 44 movl %esi,%eax # Return value (int)
|
H A D | signal.c | 398 * This is popl %eax ; movl $,%eax ; int $0x80 setup_signal_stack_sc() 444 * This is movl $,%eax ; int $0x80 setup_signal_stack_si()
|
/linux-4.1.27/arch/x86/math-emu/ |
H A D | round_Xsig.S | 27 movl %esp,%ebp 32 movl PARAM1,%esi 34 movl 8(%esi),%edx 35 movl 4(%esi),%ebx 36 movl (%esi),%eax 38 movl $0,-4(%ebp) 44 movl %ebx,%edx 45 movl %eax,%ebx 47 movl $-32,-4(%ebp) 67 movl $0x80000000,%edx 71 movl %edx,8(%esi) 72 movl %ebx,4(%esi) 73 movl %eax,(%esi) 75 movl -4(%ebp),%eax 87 movl %esp,%ebp 92 movl PARAM1,%esi 94 movl 8(%esi),%edx 95 movl 4(%esi),%ebx 96 movl (%esi),%eax 98 movl $0,-4(%ebp) 104 movl %ebx,%edx 105 movl %eax,%ebx 107 movl $-32,-4(%ebp) 113 movl %ebx,%edx 114 movl %eax,%ebx 131 movl %edx,8(%esi) 132 movl %ebx,4(%esi) 133 movl %eax,(%esi) 135 movl -4(%ebp),%eax
|
H A D | shr_Xsig.S | 26 movl %esp,%ebp 28 movl PARAM2,%ecx 29 movl PARAM1,%esi 35 movl (%esi),%eax /* lsl */ 36 movl 4(%esi),%ebx /* midl */ 37 movl 8(%esi),%edx /* msl */ 41 movl %eax,(%esi) 42 movl %ebx,4(%esi) 43 movl %edx,8(%esi) 54 movl 4(%esi),%eax /* midl */ 55 movl 8(%esi),%edx /* msl */ 58 movl %eax,(%esi) 59 movl %edx,4(%esi) 60 movl $0,8(%esi) 70 movl 8(%esi),%eax /* msl */ 73 movl %eax,(%esi) 74 movl %edx,4(%esi) 75 movl %edx,8(%esi) 82 movl %eax,(%esi) 83 movl %eax,4(%esi) 84 movl %eax,8(%esi)
|
H A D | polynom_Xsig.S | 41 movl %esp,%ebp 47 movl PARAM2,%esi /* x */ 48 movl PARAM3,%edi /* terms */ 50 movl TERM_SIZE,%eax 54 movl 4(%edi),%edx /* terms[n] */ 55 movl %edx,SUM_MS 56 movl (%edi),%edx /* terms[n] */ 57 movl %edx,SUM_MIDDLE 59 movl %eax,SUM_LS 68 movl %eax,ACCUM_MS 69 movl %eax,ACCUM_MIDDLE 71 movl SUM_MIDDLE,%eax 73 movl %edx,ACCUM_LS 75 movl SUM_MIDDLE,%eax 81 movl SUM_MS,%eax 87 movl SUM_MS,%eax 95 movl (%esi),%eax 97 movl 4(%esi),%eax 106 movl ACCUM_LS,%eax 108 movl %eax,SUM_LS 109 movl ACCUM_MIDDLE,%eax 111 movl %eax,SUM_MIDDLE 112 movl ACCUM_MS,%eax 114 movl %eax,SUM_MS 123 movl PARAM1,%edi /* accum */ 124 movl SUM_LS,%eax 126 movl SUM_MIDDLE,%eax 128 movl SUM_MS,%eax
|
H A D | wm_shrx.S | 37 movl %esp,%ebp 39 movl PARAM2,%ecx 40 movl PARAM1,%esi 46 movl (%esi),%ebx /* lsl */ 47 movl 4(%esi),%edx /* msl */ 52 movl %ebx,(%esi) 53 movl %edx,4(%esi) 64 movl (%esi),%eax /* lsl */ 65 movl 4(%esi),%edx /* msl */ 68 movl %edx,(%esi) 69 movl $0,4(%esi) 79 movl 4(%esi),%eax /* msl */ 82 movl %edx,(%esi) 83 movl %edx,4(%esi) 90 movl %eax,(%esi) 91 movl %eax,4(%esi) 115 movl %esp,%ebp 118 movl PARAM2,%ecx 119 movl PARAM1,%esi 130 movl (%esi),%eax /* lsl */ 131 movl 4(%esi),%edx /* msl */ 142 movl %edx,(%esi) 143 movl $0,4(%esi) 151 movl (%esi),%ebx /* lsl */ 152 movl 4(%esi),%edx /* msl */ 159 movl %ebx,(%esi) 160 movl %edx,4(%esi) 172 movl (%esi),%ebx /* lsl */ 173 movl 4(%esi),%eax /* msl */ 185 movl %edx,(%esi) /* set to zero */ 186 movl %edx,4(%esi) /* set to zero */ 195 movl (%esi),%ebx 199 movl %ebx,(%esi) 200 movl %ebx,4(%esi)
|
H A D | reg_u_mul.S | 49 movl %esp,%ebp 58 movl PARAM1,%esi 59 movl PARAM2,%edi 71 movl SIGL(%esi),%eax 73 movl %eax,FPU_accum_0 74 movl %edx,FPU_accum_1 76 movl SIGL(%esi),%eax 82 movl SIGH(%esi),%eax 88 movl SIGH(%esi),%eax 94 movl PARAM6,%eax 102 movl EXP_WAY_UNDER,%eax 107 movl PARAM3,%edi /* Point to the destination */ 122 movl FPU_accum_0,%eax 123 movl FPU_accum_1,%edx 130 movl %ecx,%eax
|
H A D | wm_sqrt.S | 79 movl %esp,%ebp 87 movl PARAM1,%esi 89 movl SIGH(%esi),%eax 90 movl SIGL(%esi),%ecx 106 movl %eax,FPU_fsqrt_arg_2 /* ms word of n */ 107 movl %ecx,FPU_fsqrt_arg_1 108 movl %edx,FPU_fsqrt_arg_0 113 movl $0xaaaaaaaa,%ecx 119 movl $0x80000000,%edx /* round up */ 122 movl %edx,%esi /* Our first guess */ 127 movl FPU_fsqrt_arg_2,%ecx /* ms word */ 139 movl %ecx,%edx /* msw of the arg / 2 */ 144 movl %ecx,%edx 149 movl %ecx,%edx 163 movl %esi,%eax 167 movl FPU_fsqrt_arg_1,%ecx 169 movl FPU_fsqrt_arg_2,%ecx /* ms word of normalized n */ 181 movl %eax,%ecx 183 movl %edx,%eax 189 movl %eax,%ecx 191 movl %edx,%eax 204 movl %eax,%edi 218 movl %eax,%edi 219 movl %eax,%esi 220 movl $0x7fffffff,%eax 234 movl %edi,%eax /* ls word of guess */ 236 movl %edx,FPU_accum_1 238 movl %esi,%eax 240 movl %edx,FPU_accum_3 241 movl %eax,FPU_accum_2 243 movl %edi,%eax 249 /* movl %esi,%eax */ 257 movl FPU_fsqrt_arg_0,%eax /* get normalized n */ 259 movl FPU_fsqrt_arg_1,%eax 261 movl FPU_fsqrt_arg_2,%eax /* ms word of normalized n */ 284 movl FPU_accum_2,%edx 285 movl FPU_accum_1,%eax 287 movl %eax,%ecx 289 movl %edx,%eax 303 movl FPU_accum_2,%edx 304 movl FPU_accum_1,%eax 306 movl %eax,%ecx 308 movl %edx,%eax 346 movl %eax,%edx 347 movl %esi,%eax 348 movl %edi,%ebx 349 movl PARAM1,%edi 367 movl %edi,%eax /* ls word of guess */ 369 movl %edx,%ebx /* 2nd ls word of square */ 370 movl %eax,%ecx /* ls word of square */ 372 movl %edi,%eax 404 movl $0x000000ff,%eax 411 movl $0xffffff00,%eax 422 movl %edi,%eax /* ls word of guess */ 424 movl %edx,%ebx /* 2nd ls word of square */ 425 movl %eax,%ecx /* ls word of square */ 427 movl %edi,%eax 459 movl $0x80000000,%eax 464 movl $0x800000ff,%eax 469 movl $0x7fffff00,%eax
|
H A D | div_Xsig.S | 79 movl %esp,%ebp 88 movl PARAM1,%esi /* pointer to num */ 89 movl PARAM2,%ebx /* pointer to denom */ 115 movl XsigH(%esi),%eax 117 movl %eax,FPU_accum_3 118 movl XsigL(%esi),%eax 120 movl %eax,FPU_accum_2 121 movl XsigLL(%esi),%eax 123 movl %eax,FPU_accum_1 124 movl $0,%eax 126 movl %eax,FPU_accum_0 128 movl FPU_accum_2,%eax /* Get the current num */ 129 movl FPU_accum_3,%edx 136 movl XsigH(%ebx),%ecx 150 movl %eax,FPU_result_3 /* Put the result in the answer */ 157 movl FPU_result_3,%eax /* Get the result back */ 172 movl XsigL(%ebx),%eax 173 movl XsigH(%ebx),%edx 187 movl FPU_accum_2,%edx /* get the reduced num */ 188 movl FPU_accum_1,%eax 203 movl %edx,FPU_accum_2 204 movl %eax,FPU_accum_1 224 movl %eax,FPU_result_2 /* Put the result in the answer */ 235 movl FPU_result_2,%eax /* Get the result back */ 254 movl XsigL(%ebx),%eax 255 movl XsigH(%ebx),%edx 281 movl FPU_result_3,%eax /* a/b */ 289 movl XsigH(%ebx),%edx 296 movl FPU_accum_1,%edx /* get the reduced num */ 300 movl XsigH(%ebx),%edx 308 movl FPU_accum_1,%edx /* get the reduced num */ 316 movl %edx,FPU_accum_1 322 movl FPU_accum_0,%eax 323 movl FPU_accum_1,%edx 326 movl %eax,FPU_result_1 /* Rough estimate of third word */ 328 movl PARAM3,%esi /* pointer to answer */ 330 movl FPU_result_1,%eax 331 movl %eax,XsigLL(%esi) 332 movl FPU_result_2,%eax 333 movl %eax,XsigL(%esi) 334 movl FPU_result_3,%eax 335 movl %eax,XsigH(%esi)
|
H A D | reg_u_sub.S | 37 movl %esp,%ebp 42 movl PARAM1,%esi /* source 1 */ 43 movl PARAM2,%edi /* source 2 */ 45 movl PARAM6,%ecx 63 movl SIGH(%edi),%eax /* register ms word */ 64 movl SIGL(%edi),%ebx /* register ls word */ 66 movl PARAM3,%edi /* destination */ 67 movl PARAM6,%edx 102 movl %eax,%ebx 107 movl %ebx,%edx 108 movl %eax,%ebx 117 movl %eax,%edx 129 movl %eax,%edx 144 movl $1,%edx /* The shifted nr always at least one '1' */ 157 movl %ecx,%edx 158 movl SIGL(%esi),%ecx 160 movl %ecx,%ebx 161 movl SIGH(%esi),%ecx 163 movl %ecx,%eax 206 movl $0,SIGL(%edi) 207 movl $0,SIGH(%edi) 208 movl TAG_Zero,%eax 212 movl %ebx,%eax 213 movl %edx,%ebx 214 movl $0,%edx 263 movl $-1,%eax
|
H A D | reg_norm.S | 26 movl %esp,%ebp 29 movl PARAM1,%ebx 31 movl SIGH(%ebx),%edx 32 movl SIGL(%ebx),%eax 41 movl %eax,%edx 54 movl %edx,SIGH(%ebx) 55 movl %eax,SIGL(%ebx) 65 movl TAG_Valid,%eax 79 movl TAG_Zero,%eax 103 movl %esp,%ebp 106 movl PARAM1,%ebx 108 movl SIGH(%ebx),%edx 109 movl SIGL(%ebx),%eax 118 movl %eax,%edx 131 movl %edx,SIGH(%ebx) 132 movl %eax,SIGL(%ebx) 135 movl TAG_Valid,%eax 142 movl TAG_Zero,%eax
|
H A D | reg_u_add.S | 36 movl %esp,%ebp 41 movl PARAM1,%esi /* source 1 */ 42 movl PARAM2,%edi /* source 2 */ 44 movl PARAM6,%ecx 45 movl %ecx,%edx 50 movl SIGL(%esi),%ebx 51 movl SIGH(%esi),%eax 53 movl %edi,%esi 54 movl PARAM7,%edx 60 movl SIGL(%edi),%ebx 61 movl SIGH(%edi),%eax 64 movl PARAM3,%edi /* destination */ 102 movl %eax,%ebx 107 movl %ebx,%edx 108 movl %eax,%ebx 116 movl %eax,%edx 124 movl $1,%edx /* The shifted nr always at least one '1' */ 158 movl $-1,%eax
|
H A D | reg_u_div.S | 79 movl %esp,%ebp 88 movl REGA,%esi 89 movl REGB,%ebx 90 movl DEST,%edi 102 movl EXP_WAY_UNDER,%edx 120 movl SIGH(%ebx),%ecx /* The divisor */ 121 movl SIGH(%esi),%edx /* Dividend */ 122 movl SIGL(%esi),%eax /* Dividend */ 133 movl %eax,FPU_result_2 138 movl %eax,FPU_result_1 181 movl SIGL(%esi),%eax 182 movl %eax,FPU_accum_2 183 movl SIGH(%esi),%eax 184 movl %eax,FPU_accum_3 186 movl %eax,FPU_accum_1 /* zero the extension */ 187 movl %eax,FPU_accum_0 /* zero the extension */ 189 movl SIGL(%esi),%eax /* Get the current num */ 190 movl SIGH(%esi),%edx 210 movl %eax,FPU_accum_2 211 movl %edx,FPU_accum_3 218 movl SIGH(%ebx),%ecx 232 movl %eax,FPU_result_2 /* Put the result in the answer */ 239 movl FPU_result_2,%eax /* Get the result back */ 254 movl SIGL(%ebx),%eax 255 movl SIGH(%ebx),%edx 269 movl FPU_accum_2,%edx /* get the reduced num */ 270 movl FPU_accum_1,%eax 285 movl %edx,FPU_accum_2 286 movl %eax,FPU_accum_1 306 movl %eax,FPU_result_1 /* Put the result in the answer */ 317 movl FPU_result_1,%eax /* Get the result back */ 336 movl SIGL(%ebx),%eax 337 movl SIGH(%ebx),%edx 359 movl FPU_accum_1,%edx /* get the reduced num */ 360 movl FPU_accum_0,%eax 374 movl %edx,FPU_accum_1 375 movl %eax,FPU_accum_0 391 movl FPU_accum_0,%ecx 392 movl FPU_accum_1,%edx 393 movl %ecx,%eax 407 movl $0x70000000,%eax /* Denom was larger */ 413 movl $0x80000000,%eax /* Remainder was exactly 1/2 denom */ 417 movl $0xff000000,%eax /* Denom was smaller */ 437 movl %eax,%edx 438 movl FPU_result_1,%ebx 439 movl FPU_result_2,%eax 464 movl $-1,%eax
|
H A D | poly.h | 66 asm volatile ("mull %2; movl %%edx,%%eax":"=a" (retval) mul_32_32() 75 asm volatile ("movl %1,%%edi; movl %2,%%esi;\n" add_Xsig_Xsig() 76 "movl (%%esi),%%eax; addl %%eax,(%%edi);\n" add_Xsig_Xsig() 77 "movl 4(%%esi),%%eax; adcl %%eax,4(%%edi);\n" add_Xsig_Xsig() 78 "movl 8(%%esi),%%eax; adcl %%eax,8(%%edi);\n":"=g" add_Xsig_Xsig() 89 asm volatile ("movl %2,%%ecx; movl %3,%%esi;\n" add_two_Xsig() 90 "movl (%%esi),%%eax; addl %%eax,(%%ecx);\n" add_two_Xsig() 91 "movl 4(%%esi),%%eax; adcl %%eax,4(%%ecx);\n" add_two_Xsig() 92 "movl 8(%%esi),%%eax; adcl %%eax,8(%%ecx);\n" add_two_Xsig() 95 "movl %4,%%ecx; incl (%%ecx)\n" add_two_Xsig() 96 "movl $1,%%eax; jmp 1f;\n" add_two_Xsig() 106 asm volatile ("movl %1,%%esi;\n" negate_Xsig() 108 "movl %%ecx,%%eax; subl (%%esi),%%eax; movl %%eax,(%%esi);\n" negate_Xsig() 109 "movl %%ecx,%%eax; sbbl 4(%%esi),%%eax; movl %%eax,4(%%esi);\n" negate_Xsig() 110 "movl %%ecx,%%eax; sbbl 8(%%esi),%%eax; movl %%eax,8(%%esi);\n":"=g" negate_Xsig()
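
add_Xsig_Xsig() above is a 96-bit add: the low limbs are combined with addl and the two higher limbs with adcl so the carry propagates across all three 32-bit words. The same arithmetic in portable C, as a sketch only (xsig96 is an invented stand-in for the math-emu Xsig type):

#include <stdint.h>

/* Invented stand-in for the FPU emulator's extended significand:
 * three 32-bit limbs, least-significant word first. */
struct xsig96 {
	uint32_t lsw, midw, msw;
};

/* 96-bit a += b with carry propagation, mirroring addl/adcl/adcl above. */
static void xsig96_add(struct xsig96 *a, const struct xsig96 *b)
{
	uint64_t t;

	t = (uint64_t)a->lsw + b->lsw;
	a->lsw = (uint32_t)t;
	t = (uint64_t)a->midw + b->midw + (t >> 32);	/* carry in */
	a->midw = (uint32_t)t;
	a->msw += b->msw + (uint32_t)(t >> 32);		/* final carry */
}
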
|
H A D | reg_round.S | 113 movl %esp,%ebp 118 movl PARAM1,%edi 119 movl SIGH(%edi),%eax 120 movl SIGL(%edi),%ebx 121 movl PARAM2,%edx 124 movl PARAM4,%ecx 144 movl %ecx,%esi 172 movl %esi,%ecx 201 movl %eax,%ecx 210 movl %eax,%ecx 237 movl %eax,%ecx 252 movl %esi,%ecx 281 movl %ebx,%ecx 289 movl %ebx,%ecx 313 movl %ebx,%ecx 326 movl %esi,%ecx 396 movl TAG_Valid,%edx 409 movl %eax,SIGH(%edi) 410 movl %ebx,SIGL(%edi) 415 movl %edx,%eax 521 movl %ebx,%edx 522 movl %eax,%ebx 541 movl %eax,%edx 550 movl $1,%edx 600 movl TAG_Special,%edx 609 movl TAG_Special,%edx 630 movl TAG_Zero,%edx
|
H A D | fpu_entry.c | 669 __asm__("movl %0,%%esp ; ret": :"g"(((long)info) - 4)); math_abort()
|
/linux-4.1.27/arch/x86/platform/olpc/ |
H A D | xo1-wakeup.S | 25 movl $initial_page_table - __PAGE_OFFSET, %eax 26 movl %eax, %cr3 28 movl saved_cr4, %eax 29 movl %eax, %cr4 31 movl saved_cr0, %eax 32 movl %eax, %cr0 50 movl %cr3, %eax 51 movl %eax, %cr3 63 movl %cr4, %edx 64 movl %edx, saved_cr4 66 movl %cr0, %edx 67 movl %edx, saved_cr0 71 movl %ebx, saved_context_ebx 72 movl %ebp, saved_context_ebp 73 movl %esi, saved_context_esi 74 movl %edi, saved_context_edi 82 movl saved_context_ebp, %ebp 83 movl saved_context_ebx, %ebx 84 movl saved_context_esi, %esi 85 movl saved_context_edi, %edi 97 movl %esp, saved_context_esp 105 movl saved_context_esp, %esp
|
H A D | olpc-xo1-pm.c | 85 __asm__("movl %0,%%eax" : : "r" (pgd_addr)); xo1_do_sleep()
|
/linux-4.1.27/arch/x86/kernel/ |
H A D | relocate_kernel_32.S | 50 movl 20+8(%esp), %ebp /* list of pages */ 51 movl PTR(VA_CONTROL_PAGE)(%ebp), %edi 52 movl %esp, ESP(%edi) 53 movl %cr0, %eax 54 movl %eax, CR0(%edi) 55 movl %cr3, %eax 56 movl %eax, CR3(%edi) 57 movl %cr4, %eax 58 movl %eax, CR4(%edi) 61 movl 20+4(%esp), %ebx /* page_list */ 62 movl 20+8(%esp), %ebp /* list of pages */ 63 movl 20+12(%esp), %edx /* start address */ 64 movl 20+16(%esp), %ecx /* cpu_has_pae */ 65 movl 20+20(%esp), %esi /* preserve_context */ 72 movl PTR(VA_CONTROL_PAGE)(%ebp), %edi 73 movl %edi, CP_VA_CONTROL_PAGE(%edi) 74 movl PTR(PA_PGD)(%ebp), %eax 75 movl %eax, CP_PA_PGD(%edi) 76 movl PTR(PA_SWAP_PAGE)(%ebp), %eax 77 movl %eax, CP_PA_SWAP_PAGE(%edi) 78 movl %ebx, CP_PA_BACKUP_PAGES_MAP(%edi) 84 movl PTR(PA_CONTROL_PAGE)(%ebp), %edi 87 movl PTR(PA_PGD)(%ebp), %eax 88 movl %eax, %cr3 94 movl %edi, %eax 114 movl %cr0, %eax 117 movl %eax, %cr0 127 movl %eax, %cr4 134 movl %eax, %cr3 136 movl CP_PA_SWAP_PAGE(%edi), %eax 148 movl %eax, %cr3 167 movl CP_PA_SWAP_PAGE(%edi), %esp 173 movl 0(%esp), %ebp 178 movl CP_VA_CONTROL_PAGE(%ebx), %edi 180 movl CP_PA_SWAP_PAGE(%ebx), %eax 181 movl CP_PA_BACKUP_PAGES_MAP(%ebx), %edx 186 movl CP_PA_PGD(%ebx), %eax 187 movl %eax, %cr3 188 movl %cr0, %eax 190 movl %eax, %cr0 192 movl %edi, %eax 198 movl CR4(%edi), %eax 199 movl %eax, %cr4 200 movl CR3(%edi), %eax 201 movl %eax, %cr3 202 movl CR0(%edi), %eax 203 movl %eax, %cr0 204 movl ESP(%edi), %esp 205 movl %ebp, %eax 216 movl 8(%esp), %edx 217 movl 4(%esp), %ecx 222 movl %ecx, %ebx 226 movl (%ebx), %ecx 231 movl %ecx, %edi 237 movl %ecx, %ebx 247 movl %ecx, %esi /* For every source page do a copy */ 250 movl %edi, %eax 251 movl %esi, %ebp 253 movl %edx, %edi 254 movl $1024, %ecx 257 movl %ebp, %edi 258 movl %eax, %esi 259 movl $1024, %ecx 262 movl %eax, %edi 263 movl %edx, %esi 264 movl $1024, %ecx
|
H A D | head_32.S | 97 movl pa(stack_start),%ecx 108 movl $(__BOOT_DS),%eax 109 movl %eax,%ds 110 movl %eax,%es 111 movl %eax,%fs 112 movl %eax,%gs 113 movl %eax,%ss 122 movl $pa(__bss_start),%edi 123 movl $pa(__bss_stop),%ecx 135 movl $pa(boot_params),%edi 136 movl $(PARAM_SIZE/4),%ecx 140 movl pa(boot_params) + NEW_CL_POINTER,%esi 143 movl $pa(boot_command_line),%edi 144 movl $(COMMAND_LINE_SIZE/4),%ecx 151 movl %cr3, %eax 152 movl %eax, pa(olpc_ofw_pgd) 182 movl $pa(__brk_base), %edi 183 movl $pa(initial_pg_pmd), %edx 184 movl $PTE_IDENT_ATTR, %eax 187 movl %ecx,(%edx) /* Store PMD entry */ 190 movl $512,%ecx 202 movl $pa(_end) + MAPPING_BEYOND_END + PTE_IDENT_ATTR, %ebp 207 movl %edi, pa(_brk_end) 209 movl %eax, pa(max_pfn_mapped) 212 movl $pa(initial_pg_fixmap)+PDE_IDENT_ATTR,%eax 213 movl %eax,pa(initial_pg_pmd+0x1000*KPMDS-8) 218 movl $pa(__brk_base), %edi 219 movl $pa(initial_page_table), %edx 220 movl $PTE_IDENT_ATTR, %eax 223 movl %ecx,(%edx) /* Store identity PDE entry */ 224 movl %ecx,page_pde_offset(%edx) /* Store kernel PDE entry */ 226 movl $1024, %ecx 234 movl $pa(_end) + MAPPING_BEYOND_END + PTE_IDENT_ATTR, %ebp 238 movl %edi, pa(_brk_end) 240 movl %eax, pa(max_pfn_mapped) 243 movl $pa(initial_pg_fixmap)+PDE_IDENT_ATTR,%eax 244 movl %eax,pa(initial_page_table+0xffc) 254 movl pa(boot_params + BP_hardware_subarch), %eax 258 movl pa(subarch_entries)(,%eax,4), %eax 289 movl stack_start, %ecx 290 movl %ecx, %esp 305 movl $(__BOOT_DS),%eax 306 movl %eax,%ds 307 movl %eax,%es 308 movl %eax,%fs 309 movl %eax,%gs 310 movl pa(stack_start),%ecx 311 movl %eax,%ss 324 movl $(CR0_STATE & ~X86_CR0_PG),%eax 325 movl %eax,%cr0 343 movl $-1,pa(X86_CPUID) # preset CPUID level 344 movl $X86_EFLAGS_ID,%ecx 355 movl %eax,pa(X86_CPUID) # save largest std CPUID function 357 movl $1,%eax 362 movl pa(mmu_cr4_features),%eax 363 movl %eax,%cr4 369 movl $0x80000000, %eax 386 movl $MSR_EFER, %ecx 398 movl $pa(initial_page_table), %eax 399 movl %eax,%cr3 /* set the page table pointer.. */ 400 movl $CR0_STATE,%eax 401 movl %eax,%cr0 /* ..and set paging (PG) bit */ 411 movl setup_once_ref,%eax 427 movl %eax,X86_CPUID # save CPUID level 428 movl %ebx,X86_VENDOR_ID # lo 4 chars 429 movl %edx,X86_VENDOR_ID+4 # next 4 chars 430 movl %ecx,X86_VENDOR_ID+8 # last 4 chars 435 movl $1,%eax # Use the CPUID instruction to get CPU type 445 movl %edx,X86_CAPABILITY 448 movl $0x50022,%ecx # set AM, WP, NE and MP 449 movl %cr0,%eax 452 movl %eax,%cr0 457 1: movl $(__KERNEL_DS),%eax # reload all the segment registers 458 movl %eax,%ss # after changing gdt. 460 movl $(__USER_DS),%eax # DS/ES contains default USER segment 461 movl %eax,%ds 462 movl %eax,%es 464 movl $(__KERNEL_PERCPU), %eax 465 movl %eax,%fs # set this cpu's percpu 467 movl $(__KERNEL_STACK_CANARY),%eax 468 movl %eax,%gs 495 movl $idt_table,%edi 496 movl $early_idt_handler_array,%eax 497 movl $NUM_EXCEPTION_VECTORS,%ecx 499 movl %eax,(%edi) 500 movl %eax,4(%edi) 502 movl $(0x8E000000 + __KERNEL_CS),2(%edi) 507 movl $256 - NUM_EXCEPTION_VECTORS,%ecx 508 movl $ignore_int,%edx 509 movl $(__KERNEL_CS << 16),%eax 513 movl %eax,(%edi) 514 movl %edx,4(%edi) 524 movl $gdt_page,%eax 525 movl $stack_canary,%ecx 571 movl $(__KERNEL_DS),%eax 572 movl %eax,%ds 573 movl %eax,%es 595 movl %cr2,%eax 628 movl $(__KERNEL_DS),%eax 629 movl %eax,%ds 630 movl %eax,%es
|
H A D | entry_32.S | 78 * GET_CR0_INTO_EAX (aka. "movl %cr0, %eax") 154 99: movl $0, (%esp) 165 99: movl $0, PT_GS(%esp) 172 movl %gs, \reg 176 movl \reg, PT_GS(%esp) 180 movl $(__KERNEL_STACK_CANARY), \reg 181 movl \reg, %gs 209 movl $(__USER_DS), %edx 210 movl %edx, %ds 211 movl %edx, %es 212 movl $(__KERNEL_PERCPU), %edx 213 movl %edx, %fs 244 4: movl $0, (%esp) 246 5: movl $0, (%esp) 248 6: movl $0, (%esp) 310 movl PT_EBP(%esp),%eax 312 movl $0,PT_EAX(%esp) 332 movl PT_EFLAGS(%esp), %eax # mix EFLAGS and CS 339 movl PT_CS(%esp), %eax 351 movl TI_flags(%ebp), %ecx 381 movl TSS_sysenter_sp0(%esp),%esp 418 1: movl (%ebp),%ebp 420 movl %ebp,PT_EBP(%esp) 432 movl %eax,PT_EAX(%esp) 436 movl TI_flags(%ebp), %ecx 441 movl PT_EIP(%esp), %edx 442 movl PT_OLDESP(%esp), %ecx 453 /* movl PT_EAX(%esp), %eax already set, syscall number: 1st arg to audit */ 454 movl PT_EBX(%esp), %edx /* ebx/a0: 2nd arg to audit */ 455 /* movl PT_ECX(%esp), %ecx already set, a1: 3nd arg to audit */ 461 movl PT_EAX(%esp),%eax /* reload syscall number */ 469 movl %eax,%edx /* second arg, syscall return value */ 476 movl TI_flags(%ebp), %ecx 479 movl PT_EAX(%esp),%eax /* reload syscall return value */ 485 2: movl $0,PT_FS(%esp) 507 movl %eax,PT_EAX(%esp) # store the return value 514 movl TI_flags(%ebp), %ecx 522 movl PT_EFLAGS(%esp), %eax # mix EFLAGS, SS and CS 606 movl TI_flags(%ebp), %ecx 617 movl %esp, %eax 622 movl %esp, %eax 640 movl %eax, %esp 648 movl $-ENOSYS,PT_EAX(%esp) 649 movl %esp, %eax 665 movl %esp, %eax 675 movl $-EFAULT,PT_EAX(%esp) 680 movl $-ENOSYS,%eax 685 movl $-ENOSYS,%eax 712 movl %ss, %eax 716 movl $__KERNEL_DS, %eax 717 movl %eax, %ds 718 movl %eax, %es 752 movl %esp,%eax 765 movl %esp,%eax; \ 950 movl PT_EIP(%esp),%eax 981 movl $1,%eax 1001 movl %eax,4(%esp) 1004 movl %eax,8(%esp) 1007 movl %eax,12(%esp) 1010 movl %eax,16(%esp) 1043 movl 4*4(%esp), %eax 1044 movl 0x4(%ebp), %edx 1045 movl function_trace_op, %ecx 1093 movl 13*4(%esp), %eax /* Get the saved flags */ 1094 movl %eax, 14*4(%esp) /* Move saved flags into regs->flags location */ 1096 movl $__KERNEL_CS,13*4(%esp) 1098 movl 12*4(%esp), %eax /* Load ip (1st parameter) */ 1100 movl 0x4(%ebp), %edx /* Load parent ip (2nd parameter) */ 1101 movl function_trace_op, %ecx /* Save ftrace_pos in 3rd parameter */ 1108 movl 14*4(%esp), %eax /* Move flags back into cs */ 1109 movl %eax, 13*4(%esp) /* Needed to keep addl from modifying flags */ 1110 movl 12*4(%esp), %eax /* Get return ip from regs->ip */ 1111 movl %eax, 14*4(%esp) /* Put return ip back for ret */ 1154 movl 0xc(%esp), %eax 1155 movl 0x4(%ebp), %edx 1173 movl 0xc(%esp), %eax 1175 movl (%ebp), %ecx 1188 movl %ebp, %eax 1190 movl %eax, %ecx 1227 movl $(__KERNEL_PERCPU), %ecx 1228 movl %ecx, %fs 1231 movl PT_GS(%esp), %edi # get the function address 1232 movl PT_ORIG_EAX(%esp), %edx # get the error code 1233 movl $-1, PT_ORIG_EAX(%esp) # no syscall to restart 1236 movl $(__USER_DS), %ecx 1237 movl %ecx, %ds 1238 movl %ecx, %es 1240 movl %esp,%eax # pt_regs pointer 1263 movl TSS_sysenter_sp0 + \offset(%esp), %esp 1283 movl %esp,%eax # pt_regs pointer 1302 movl %ss, %eax 1310 movl %esp,%eax 1325 movl %esp,%eax # pt_regs pointer 1379 movl %esp,%eax # pt_regs pointer
|
H A D | verify_cpu.S | 44 movl %eax,%ebx 54 movl $0x0,%eax # See if cpuid 1 is implemented 79 movl $0x1, %eax # check CPU family and model 81 movl %eax, %ecx 95 movl $MSR_IA32_MISC_ENABLE, %ecx 102 movl $0x1,%eax # Does the cpu have what it takes 108 movl $0x80000000,%eax # See if extended cpuid is implemented 113 movl $0x80000001,%eax # Does the cpu have what it takes 120 movl $1,%eax 127 movl $MSR_K7_HWCR,%ecx 136 movl $1,%eax
|
H A D | head_64.S | 187 movl $(X86_CR4_PAE | X86_CR4_PGE), %ecx 200 movl $0x80000001, %eax 202 movl %edx,%edi 205 movl $MSR_EFER, %ecx 218 movl $CR0_STATE, %eax 239 movl %eax,%ds 240 movl %eax,%ss 241 movl %eax,%es 248 movl %eax,%fs 249 movl %eax,%gs 258 movl $MSR_GS_BASE,%ecx 259 movl initial_gs(%rip),%eax 260 movl initial_gs+4(%rip),%edx 392 movl 80(%rsp),%r8d # error code 393 movl 72(%rsp),%esi # vector number 394 movl 96(%rsp),%edx # %cs
|
H A D | relocate_kernel_64.S | 126 movl $X86_CR4_PAE, %eax 249 movl $512, %ecx 254 movl $512, %ecx 259 movl $512, %ecx
|
H A D | irq_32.c | 65 "movl %%ebx,%%esp \n" call_on_stack() 106 "movl %%ebx,%%esp \n" execute_on_irq_stack()
|
H A D | entry_64.S | 315 movl $AUDIT_ARCH_X86_64, %esi 326 movl $AUDIT_ARCH_X86_64, %esi 358 movl $_TIF_ALLWORK_MASK,%edi 363 movl TI_flags(%rcx),%edx 405 1: movl $_TIF_WORK_MASK,%edi 640 movl $0, RAX(%rsp) 753 movl $_TIF_WORK_MASK,%edi 756 movl TI_flags(%rcx),%edx 1103 movl %edi,%gs 1117 movl %eax,%gs 1280 movl $1,%ebx 1281 movl $MSR_GS_BASE,%ecx 1351 movl %ecx,%eax /* zero extend */ 1377 movl %ebx,%eax 1385 movl TI_flags(%rcx),%edx 1386 movl $_TIF_WORK_MASK,%edi
|
H A D | process_64.c | 81 asm("movl %%ds,%0" : "=r" (ds)); __show_regs() 82 asm("movl %%cs,%0" : "=r" (cs)); __show_regs() 83 asm("movl %%es,%0" : "=r" (es)); __show_regs() 84 asm("movl %%fs,%0" : "=r" (fsindex)); __show_regs() 85 asm("movl %%gs,%0" : "=r" (gsindex)); __show_regs()
|
H A D | signal.c | 256 0xb858, /* popl %eax; movl $..., %eax */ 262 u8 movl; member in struct:__anon3158 267 0xb8, /* movl $..., %eax */ 311 * This is popl %eax ; movl $__NR_sigreturn, %eax ; int $0x80 __setup_frame() 371 * This is movl $__NR_rt_sigreturn, %ax ; int $0x80 __setup_rt_frame()
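
The constants here are the machine code of the legacy sigreturn trampoline kept on the user stack (the comments note it survives mainly as a gdb signature): the 16-bit value 0xb858 is, in memory order, 0x58 (popl %eax) followed by 0xb8 (the opcode of movl $imm32,%eax), then the __NR_sigreturn immediate and int $0x80. A small decode sketch, with the immediate left as a placeholder because it is elided in the source comments:

#include <stdint.h>
#include <stdio.h>

/* Sketch: byte-level view of the sigreturn retcode described above. */
int main(void)
{
	const uint8_t retcode[] = {
		0x58,			/* popl %eax                          */
		0xb8, 0, 0, 0, 0,	/* movl $__NR_sigreturn,%eax (imm elided) */
		0xcd, 0x80,		/* int  $0x80                         */
	};
	printf("trampoline is %zu bytes\n", sizeof(retcode));
	return 0;
}
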
|
H A D | vm86_32.c | 339 "movl %0,%%esp\n\t" do_sys_vm86() 340 "movl %1,%%ebp\n\t" do_sys_vm86() 356 __asm__ __volatile__("movl %0,%%esp\n\t" return_to_32bit() 357 "movl %1,%%ebp\n\t" return_to_32bit()
|
H A D | ptrace.c | 282 asm("movl %%fs,%0" : "=r" (seg)); offsetof() 288 asm("movl %%gs,%0" : "=r" (seg)); offsetof() 294 asm("movl %%ds,%0" : "=r" (seg)); offsetof() 300 asm("movl %%es,%0" : "=r" (seg)); offsetof() 481 asm("movl %%fs,%0" : "=r" (seg)); offsetof() 494 asm("movl %%gs,%0" : "=r" (seg)); offsetof()
|
H A D | ftrace.c | 990 " movl $0, %[faulted]\n" prepare_ftrace_return() 994 "4: movl $1, %[faulted]\n" prepare_ftrace_return()
|
H A D | uprobes.c | 559 * "movl %edx,0xnnnn(%rip)", we have instead executed an equivalent 560 * instruction using a scratch register -- e.g., "movl %edx,0xnnnn(%rsi)".
|
/linux-4.1.27/arch/x86/kernel/acpi/ |
H A D | wakeup_32.S | 25 movl %cr3, %eax 26 movl %eax, %cr3 30 movl saved_context_esp, %esp 32 movl %cs:saved_magic, %eax 37 movl saved_eip, %eax 51 movl %eax, saved_context_esp 52 movl %ebx, saved_context_ebx 53 movl %ebp, saved_context_ebp 54 movl %esi, saved_context_esi 55 movl %edi, saved_context_edi 59 movl $ret_point, saved_eip 64 movl saved_context_ebp, %ebp 65 movl saved_context_ebx, %ebx 66 movl saved_context_esi, %esi 67 movl saved_context_edi, %edi
|
H A D | wakeup_64.S | 74 movl $3, %edi
|
/linux-4.1.27/tools/perf/arch/x86/tests/ |
H A D | regs_load.S | 66 movl 8(%esp), %edi 67 movl %eax, AX(%edi) 68 movl %ebx, BX(%edi) 69 movl %ecx, CX(%edi) 70 movl %edx, DX(%edi) 71 movl %esi, SI(%edi) 73 movl %eax, DI(%edi) 74 movl %ebp, BP(%edi) 77 movl %eax, SP(%edi) 79 movl 0(%esp), %eax 80 movl %eax, IP(%edi) 82 movl $0, FLAGS(%edi) 83 movl $0, CS(%edi) 84 movl $0, SS(%edi) 85 movl $0, DS(%edi) 86 movl $0, ES(%edi) 87 movl $0, FS(%edi) 88 movl $0, GS(%edi)
|
/linux-4.1.27/arch/x86/boot/compressed/ |
H A D | efi_thunk_64.S | 31 movl %eax, 4(%rsp) 33 movl %eax, (%rsp) 34 movl %eax, 2(%rax) /* Fixup the gdt base address */ 36 movl %ds, %eax 38 movl %es, %eax 40 movl %ss, %eax 47 movl %esi, 0x0(%rsp) 48 movl %edx, 0x4(%rsp) 49 movl %ecx, 0x8(%rsp) 51 movl %esi, 0xc(%rsp) 53 movl %esi, 0x10(%rsp) 78 movl %ebx, %ss 80 movl %ebx, %es 82 movl %ebx, %ds 89 movl %eax, %ecx 115 movl $__KERNEL_DS, %eax 116 movl %eax, %ds 117 movl %eax, %es 118 movl %eax, %ss 121 movl %cr3, %eax 122 movl %eax, %cr3 125 movl %cr0, %eax 127 movl %eax, %cr0 130 movl $MSR_EFER, %ecx 138 movl %eax, %edi 146 movl 56(%esp), %eax 147 movl %eax, 2(%eax) 150 movl %cr4, %eax 152 movl %eax, %cr4 154 movl %cr3, %eax 155 movl %eax, %cr3 157 movl $MSR_EFER, %ecx 165 movl 60(%esp), %eax 170 movl %cr0, %eax 172 movl %eax, %cr0
|
H A D | head_64.S | 54 movl $(__BOOT_DS), %eax 55 movl %eax, %ds 56 movl %eax, %es 57 movl %eax, %ss 74 movl $boot_stack_end, %eax 76 movl %eax, %esp 92 movl %ebp, %ebx 93 movl BP_kernel_alignment(%esi), %eax 101 movl $LOAD_PHYSICAL_ADDR, %ebx 113 movl %eax, gdt+2(%ebp) 117 movl %cr4, %eax 119 movl %eax, %cr4 127 movl $((4096*6)/4), %ecx 133 movl %eax, 0(%edi) 138 movl $4, %ecx 139 1: movl %eax, 0x00(%edi) 147 movl $0x00000183, %eax 148 movl $2048, %ecx 149 1: movl %eax, 0(%edi) 157 movl %eax, %cr3 160 movl $MSR_EFER, %ecx 168 movl $__BOOT_TSS, %eax 184 movl efi32_config(%ebp), %ebx 193 movl $(X86_CR0_PG | X86_CR0_PE), %eax /* Enable Paging and Protected mode */ 194 movl %eax, %cr0 213 movl %ecx, efi32_config(%ebp) 214 movl %edx, efi32_config+8(%ebp) 218 movl %eax, efi_config(%ebp) 266 movl %eax, BP_code32_start(%rsi) 290 movl BP_code32_start(%esi), %eax 299 movl %eax, %ds 300 movl %eax, %es 301 movl %eax, %ss 302 movl %eax, %fs 303 movl %eax, %gs 321 movl BP_kernel_alignment(%rsi), %eax 411 movl $z_input_len, %ecx /* input_len */
|
H A D | head_32.S | 51 movl %ecx, efi32_config(%esi) /* Handle */ 53 movl %ecx, efi32_config+8(%esi) /* EFI System table pointer */ 63 movl %esi, BP_code32_start(%eax) 78 movl %ecx, efi32_config(%esi) /* Handle */ 79 movl %edx, efi32_config+8(%esi) /* EFI System table pointer */ 88 movl %eax, %esi 95 movl BP_code32_start(%esi), %eax 110 movl $__BOOT_DS, %eax 111 movl %eax, %ds 112 movl %eax, %es 113 movl %eax, %fs 114 movl %eax, %gs 115 movl %eax, %ss 138 movl %ebp, %ebx 139 movl BP_kernel_alignment(%esi), %eax 147 movl $LOAD_PHYSICAL_ADDR, %ebx 167 movl $(_bss - startup_32), %ecx
|
H A D | efi_stub_32.S | 50 movl %ecx, saved_return_addr(%edx) 53 movl %ecx, efi_rt_function_ptr(%edx) 70 movl efi_rt_function_ptr(%edx), %ecx 76 movl saved_return_addr(%edx), %ecx
|
H A D | string.c | 9 "movl %4,%%ecx\n\t" memcpy()
|
/linux-4.1.27/arch/x86/platform/efi/ |
H A D | efi_thunk_64.S | 52 movl %ebx, 8(%rsp) 70 movl %ds, %eax 72 movl %es, %eax 74 movl %ss, %eax 78 movl %esi, 0x0(%rsp) 79 movl %edx, 0x4(%rsp) 80 movl %ecx, 0x8(%rsp) 82 movl %esi, 0xc(%rsp) 84 movl %esi, 0x10(%rsp) 98 movl %ebx, %ss 100 movl %ebx, %es 102 movl %ebx, %ds 109 movl %eax, %ecx 132 movl $__KERNEL_DS, %eax 133 movl %eax, %ds 134 movl %eax, %es 135 movl %eax, %ss 140 movl %eax, %edi 142 movl 72(%esp), %eax
|
H A D | efi_stub_32.S | 39 movl $1f, %edx 51 movl %edx, saved_return_addr 54 movl %ecx, efi_rt_function_ptr 55 movl $2f, %edx 62 movl %cr0, %edx 64 movl %edx, %cr0 88 movl %cr0, %edx 90 movl %edx, %cr0 97 movl $1f, %edx 106 movl (%edx), %ecx 113 movl (%edx), %ecx
|
/linux-4.1.27/arch/x86/purgatory/ |
H A D | setup-x86_64.S | 23 movl $0x18, %eax /* data segment */ 24 movl %eax, %ds 25 movl %eax, %es 26 movl %eax, %ss 27 movl %eax, %fs 28 movl %eax, %gs
|
H A D | entry64.S | 24 movl $0x18, %eax /* data segment */ 25 movl %eax, %ds 26 movl %eax, %es 27 movl %eax, %ss 28 movl %eax, %fs 29 movl %eax, %gs
|
/linux-4.1.27/arch/x86/realmode/rm/ |
H A D | wakeup_asm.S | 51 movl %cr0, %eax 53 movl %eax, %cr0 63 movl %eax, %cr0 69 movl $rm_stack_end, %esp 82 movl signature, %eax 87 movl end_signature, %eax 96 movl pmode_behavior, %edi 100 movl pmode_misc_en, %eax 101 movl pmode_misc_en + 4, %edx 102 movl $MSR_IA32_MISC_ENABLE, %ecx 110 movl pmode_cr3, %eax 111 movl %eax, %cr3 115 movl pmode_cr4, %eax 116 movl %eax, %cr4 120 movl pmode_efer, %eax 121 movl pmode_efer + 4, %edx 122 movl $MSR_EFER, %ecx 129 movl pmode_entry, %eax 130 movl pmode_cr0, %ecx 131 movl %ecx, %cr0
|
H A D | trampoline_64.S | 50 movl $0xA5A5A5A5, trampoline_status 54 movl $rm_stack_end, %esp 73 movl $X86_CR0_PE, %eax # protected mode (PE) bit 74 movl %eax, %cr0 # into protected mode 88 movl %edx, %ss 90 movl %edx, %ds 91 movl %edx, %es 92 movl %edx, %fs 93 movl %edx, %gs 95 movl pa_tr_cr4, %eax 96 movl %eax, %cr4 # Enable PAE mode 99 movl $pa_trampoline_pgd, %eax 100 movl %eax, %cr3 103 movl pa_tr_efer, %eax 104 movl pa_tr_efer + 4, %edx 105 movl $MSR_EFER, %ecx 109 movl $(X86_CR0_PG | X86_CR0_WP | X86_CR0_PE), %eax 110 movl %eax, %cr0
|
H A D | reboot.S | 25 movl $__KERNEL_DS, %eax 26 movl %eax, %ds 30 movl %cr0, %eax 32 movl %eax, %cr0 38 movl $MSR_EFER, %ecx 41 movl %edi, %eax 58 movl $16, %ecx 59 movl %ecx, %ds 60 movl %ecx, %es 61 movl %ecx, %fs 62 movl %ecx, %gs 63 movl %ecx, %ss 96 movl %cr0, %edx 99 movl %edx, %cr0 100 movl %ecx, %cr3 101 movl %cr0, %edx 107 movl %edx, %cr0
|
H A D | trampoline_32.S | 41 movl tr_start, %eax # where we need to go 43 movl $0xA5A5A5A5, trampoline_status
|
/linux-4.1.27/arch/x86/crypto/ |
H A D | aes-x86_64-asm_64.S | 58 movl (r7),r5 ## E; \ 59 movl 4(r7),r1 ## E; \ 60 movl 8(r7),r6 ## E; \ 61 movl 12(r7),r7 ## E; \ 62 movl 480(r8),r10 ## E; \ 76 movl r5 ## E,(r9); \ 77 movl r6 ## E,4(r9); \ 78 movl r7 ## E,8(r9); \ 79 movl r8 ## E,12(r9); \ 86 movl TAB+1024(,r5,4),r5 ## E;\ 88 movl TAB(,r6,4),r6 ## E; \ 99 movl TAB+1024(,r4,4),r4 ## E;\ 112 movl TAB+2048(,r3,4),r3 ## E;\ 130 movl r3 ## E,r1 ## E; \ 131 movl r4 ## E,r2 ## E;
|
H A D | camellia-x86_64-asm_64.S | 115 movl (key_table + ((kl) * 2) * 4)(CTX), RT0d; \ 129 movl (key_table + ((kr) * 2) * 4)(CTX), RT0d; \ 213 movl $24, RT1d; /* max */ 220 movl $32, RT1d; /* max */ 247 movl $32, RT2d; 248 movl $24, RXORd; 297 movl (key_table + ((kl) * 2) * 4)(CTX), RT0d; \ 307 movl (key_table + ((kl) * 2) * 4)(CTX), RT2d; \ 321 movl (key_table + ((kr) * 2) * 4)(CTX), RT2d; \ 331 movl (key_table + ((kr) * 2) * 4)(CTX), RT1d; \ 448 movl $24, RT2d; /* max */ 455 movl $32, RT2d; /* max */ 483 movl $32, RT2d; 484 movl $24, RXORd;
|
H A D | des3_ede-asm_64.S | 80 movl a, RT0d; \ 89 movl val##d, RT0d; \ 107 movl left##d, RW0d; \ 120 movl right##d, RW0d; \ 163 movl (io), left##d; \ 164 movl 4(io), right##d; \ 171 movl left##d, (io); \ 172 movl right##d, 4(io); 260 movl val##d, RT0d; \ 289 movl left##0d, RW0d; \ 298 movl left##1d, RW1d; \ 307 movl left##2d, RW2d; \ 320 movl right##0d, RW0d; \ 329 movl right##1d, RW1d; \ 338 movl right##2d, RW2d; \ 443 movl 0 * 4(%rdx), RL0d; 444 movl 1 * 4(%rdx), RR0d; 445 movl 2 * 4(%rdx), RL1d; 446 movl 3 * 4(%rdx), RR1d; 447 movl 4 * 4(%rdx), RL2d; 448 movl 5 * 4(%rdx), RR2d; 523 movl RR0d, 0 * 4(%rsi); 524 movl RL0d, 1 * 4(%rsi); 525 movl RR1d, 2 * 4(%rsi); 526 movl RL1d, 3 * 4(%rsi); 527 movl RR2d, 4 * 4(%rsi); 528 movl RL2d, 5 * 4(%rsi);
|
H A D | aesni-intel_asm.S | 1805 movl 8(%esp), KEYP # ctx 1806 movl 12(%esp), UKEYP # in_key 1807 movl 16(%esp), %edx # key_len 1812 movl %edx, 480(KEYP) 1918 movl 12(%esp), KEYP 1919 movl 16(%esp), OUTP 1920 movl 20(%esp), INP 1922 movl 480(KEYP), KLEN # key length 2107 movl 12(%esp), KEYP 2108 movl 16(%esp), OUTP 2109 movl 20(%esp), INP 2299 movl 16(%esp), KEYP 2300 movl 20(%esp), OUTP 2301 movl 24(%esp), INP 2302 movl 28(%esp), LEN 2357 movl 16(%esp), KEYP 2358 movl 20(%esp), OUTP 2359 movl 24(%esp), INP 2360 movl 28(%esp), LEN 2417 movl 20(%esp), KEYP 2418 movl 24(%esp), OUTP 2419 movl 28(%esp), INP 2420 movl 32(%esp), LEN 2421 movl 36(%esp), IVP 2459 movl 20(%esp), KEYP 2460 movl 24(%esp), OUTP 2461 movl 28(%esp), INP 2462 movl 32(%esp), LEN 2463 movl 36(%esp), IVP 2681 movl $0, %ecx 2682 movl $240, %r10d
|
H A D | serpent-sse2-i586-asm_32.S | 525 movl arg_ctx(%esp), CTX; 527 movl arg_src(%esp), %eax; 564 movl arg_dst(%esp), %eax; 588 movl arg_ctx(%esp), CTX; 590 movl arg_src(%esp), %eax; 627 movl arg_dst(%esp), %eax;
|
H A D | blowfish-x86_64-asm_64.S | 79 movl s0(CTX,RT0,4), RT0d; \ 203 movl s0(CTX,RT0,4), RT0d; \
|
H A D | camellia-aesni-avx-asm_64.S | 764 movl $24, %r8d; 787 movl $32, %r8d; 918 movl $32, %r8d; 919 movl $24, %eax; 946 movl $32, %r8d; 947 movl $24, %eax; 1263 movl $32, %r8d; 1264 movl $24, %eax;
|
H A D | camellia-aesni-avx2-asm_64.S | 804 movl $24, %r8d; 827 movl $32, %r8d; 964 movl $32, %r8d; 965 movl $24, %eax; 996 movl $32, %r8d; 997 movl $24, %eax; 1379 movl $32, %r8d; 1380 movl $24, %eax;
|
H A D | twofish-avx-x86_64-asm_64.S | 107 movl t0(CTX, RID1, 4), dst ## d; \ 108 movl t1(CTX, RID2, 4), RID2d; \
|
H A D | twofish-x86_64-asm_64.S | 267 movl $1,%eax 319 movl $1,%eax
|
H A D | cast6-avx-x86_64-asm_64.S | 103 movl s1(, RID1, 4), dst ## d; \
|
H A D | cast5-avx-x86_64-asm_64.S | 103 movl s1(, RID1, 4), dst ## d; \
|
H A D | crc32c-pcl-intel-asm_64.S | 181 movl $128,%eax
|
/linux-4.1.27/arch/ia64/hp/sim/boot/ |
H A D | boot_head.S | 25 movl gp = __gp 26 movl sp = stack_mem+16384-16 75 movl r9=0x100000000 /* tc.base */ 76 movl r10=0x0000000200000003 /* count[0], count[1] */ 77 movl r11=0x1000000000002000 /* stride[0], stride[1] */ 82 movl r9 =0x100000064 /* proc_ratio (1/100) */ 83 movl r10=0x100000100 /* bus_ratio<<32 (1/256) */ 84 movl r11=0x100000064 /* itc_ratio<<32 (1/100) */ 96 movl r8=524288 /* flush 512k million cache lines (16MB) */ 99 movl r8=0xe000000000000000 114 movl r9 =0x08122f04 /* generic=4 width=47 retired=8 cycles=18 */ 150 movl r9=0x2044040020F1865 /* num_tc_levels=2, num_unique_tcs=4 */ 154 movl r10=0x183C /* rid_size=24, impl_va_msb=60 */
|
/linux-4.1.27/arch/x86/ia32/ |
H A D | ia32entry.S | 54 movl R9(%rsp),%r9d 56 movl RCX(%rsp),%ecx 57 movl RDX(%rsp),%edx 58 movl RSI(%rsp),%esi 59 movl RDI(%rsp),%edi 60 movl %eax,%eax /* zero extension */ 126 movl %ebp, %ebp 127 movl %eax, %eax 129 movl ASM_THREAD_INFO(TI_sysenter_return, %rsp, 0), %r10d 155 1: movl (%rbp),%ebp 176 movl %edi,%r8d /* arg5 */ 177 movl %ebp,%r9d /* arg6 */ 179 movl %ebx,%edi /* arg1 */ 180 movl %edx,%edx /* arg3 (zero extension) */ 200 movl RIP(%rsp),%ecx /* User %eip */ 207 movl EFLAGS(%rsp),%r11d /* User eflags */ 215 movl RSP(%rsp),%esp 243 movl %esi,%r8d /* 5th arg: 4th syscall arg */ 244 movl %ecx,%r9d /*swap with edx*/ 245 movl %edx,%ecx /* 4th arg: 3rd syscall arg */ 246 movl %r9d,%edx /* 3rd arg: 2nd syscall arg */ 247 movl %ebx,%esi /* 2nd arg: 1st syscall arg */ 248 movl %eax,%edi /* 1st arg: syscall number */ 250 movl RAX(%rsp),%eax /* reload syscall number */ 253 movl %ebx,%edi /* reload 1st syscall arg */ 254 movl RCX(%rsp),%esi /* reload 2nd syscall arg */ 255 movl RDX(%rsp),%edx /* reload 3rd syscall arg */ 256 movl RSI(%rsp),%ecx /* reload 4th syscall arg */ 257 movl RDI(%rsp),%r8d /* reload 5th syscall arg */ 265 movl %eax,%esi /* second arg, syscall return value */ 273 movl $(_TIF_ALLWORK_MASK & ~_TIF_SYSCALL_AUDIT),%edi 284 movl %ebp,%r9d /* reload 6th syscall arg */ 357 movl %esp,%r8d 363 movl %eax,%eax 378 movl %ebp,%ecx 388 1: movl (%r8),%r9d 399 movl %edi,%r8d /* arg5 */ 402 movl %ebx,%edi /* arg1 */ 403 movl %edx,%edx /* arg3 (zero extension) */ 414 movl RIP(%rsp),%ecx 416 movl EFLAGS(%rsp),%r11d 422 movl RSP(%rsp),%esp 443 movl %r9d,R9(%rsp) /* register to be clobbered by call */ 445 movl R9(%rsp),%r9d /* reload 6th syscall arg */ 519 movl %eax,%eax 539 movl %edi,%r8d /* arg5 */ 540 movl %ebp,%r9d /* arg6 */ 542 movl %ebx,%edi /* arg1 */ 543 movl %edx,%edx /* arg3 (zero extension) */
|
H A D | ia32_signal.c | 356 0xb858, /* popl %eax ; movl $...,%eax */ ia32_setup_frame() 430 u8 movl; ia32_setup_rt_frame() member in struct:__anon2969
|
/linux-4.1.27/arch/x86/um/asm/ |
H A D | processor_32.h | 56 ({ void *pc; __asm__("movl $1f,%0\n1:":"=g" (pc)); pc; }) 58 #define current_sp() ({ void *sp; __asm__("movl %%esp, %0" : "=r" (sp) : ); sp; }) 59 #define current_bp() ({ unsigned long bp; __asm__("movl %%ebp, %0" : "=r" (bp) : ); bp; })
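
These macros capture the current text address, stack pointer, and frame pointer by copying a register (or a local label's address) into a C variable with a one-line asm statement. The pattern generalizes; a hedged standalone equivalent of current_sp(), using an invented name, would look like:

/* Sketch of the movl-register-to-variable idiom shown above
 * (32-bit x86, GCC/Clang inline asm); read_esp is illustrative. */
static inline void *read_esp(void)
{
	void *sp;

	__asm__("movl %%esp, %0" : "=r"(sp));
	return sp;
}
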
|
H A D | checksum.h | 122 asm( " movl (%1), %0\n" ip_fast_csum() 133 " movl %0, %2\n" ip_fast_csum()
|
/linux-4.1.27/arch/x86/um/shared/sysdep/ |
H A D | stub_32.h | 82 __asm__ volatile ("movl %%eax,%%ebp ; movl %0,%%eax ; int $0x80 ;" remap_stack() 83 "movl %7, %%ebx ; movl %%eax, (%%ebx)" remap_stack()
|
/linux-4.1.27/arch/unicore32/kernel/ |
H A D | sleep.S | 85 movl r6, #0x00002401 88 movl r6, #0x00100c00 92 movl r8, #0x800001ff @ epip4d 96 movl r5, #0x40000 104 movl r6, #0xc 107 movl r7, #0x1 112 @ movl r0, 0x11111111 162 @ movl r0, 0x20202020 190 @ movl r0, 0x30303030
|
H A D | head.S | 69 movl r1, 0xff00ffff @ mask 70 movl r2, 0x4d000863 @ value 78 movl r0, #KERNEL_PGD_PADDR @ page table address 88 movl r4, #KERNEL_PGD_PADDR @ page table address 110 movl r6, #(KERNEL_END - 1) 142 movl r0, #0x201f @ control register setting
|
H A D | debug-macro.S | 57 movl r2, #UART_DIVISOR_DEFAULT
|
/linux-4.1.27/arch/x86/include/asm/ |
H A D | kexec.h | 100 asm volatile("movl %%ebx,%0" : "=m"(newregs->bx)); crash_setup_regs() 101 asm volatile("movl %%ecx,%0" : "=m"(newregs->cx)); crash_setup_regs() 102 asm volatile("movl %%edx,%0" : "=m"(newregs->dx)); crash_setup_regs() 103 asm volatile("movl %%esi,%0" : "=m"(newregs->si)); crash_setup_regs() 104 asm volatile("movl %%edi,%0" : "=m"(newregs->di)); crash_setup_regs() 105 asm volatile("movl %%ebp,%0" : "=m"(newregs->bp)); crash_setup_regs() 106 asm volatile("movl %%eax,%0" : "=m"(newregs->ax)); crash_setup_regs() 107 asm volatile("movl %%esp,%0" : "=m"(newregs->sp)); crash_setup_regs() 108 asm volatile("movl %%ss, %%eax;" :"=a"(newregs->ss)); crash_setup_regs() 109 asm volatile("movl %%cs, %%eax;" :"=a"(newregs->cs)); crash_setup_regs() 110 asm volatile("movl %%ds, %%eax;" :"=a"(newregs->ds)); crash_setup_regs() 111 asm volatile("movl %%es, %%eax;" :"=a"(newregs->es)); crash_setup_regs() 130 asm volatile("movl %%ss, %%eax;" :"=a"(newregs->ss)); crash_setup_regs() 131 asm volatile("movl %%cs, %%eax;" :"=a"(newregs->cs)); crash_setup_regs()
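
crash_setup_regs() snapshots each general-purpose register into the pt_regs being assembled for the crash dump, one movl per field, and reads the segment registers through %eax with an "=a" output. A minimal standalone sketch of the same capture idiom (32-bit x86; regs_snapshot is an invented struct, not the kernel's pt_regs):

/* Illustrative only: mirrors the "movl %%reg,%0" : "=m"(field) pattern
 * used by crash_setup_regs(); not the kernel structure or API. */
struct regs_snapshot {
	unsigned long bx, cx, dx, si, di, bp, ax, sp;
};

static inline void snapshot_gprs(struct regs_snapshot *r)
{
	asm volatile("movl %%ebx,%0" : "=m"(r->bx));
	asm volatile("movl %%ecx,%0" : "=m"(r->cx));
	asm volatile("movl %%edx,%0" : "=m"(r->dx));
	asm volatile("movl %%esi,%0" : "=m"(r->si));
	asm volatile("movl %%edi,%0" : "=m"(r->di));
	asm volatile("movl %%ebp,%0" : "=m"(r->bp));
	asm volatile("movl %%eax,%0" : "=m"(r->ax));
	asm volatile("movl %%esp,%0" : "=m"(r->sp));
}
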
|
H A D | switch_to.h | 15 "movl %P[task_canary](%[next]), %%ebx\n\t" \ 16 "movl %%ebx, "__percpu_arg([stack_canary])"\n\t" 44 "movl %%esp,%[prev_sp]\n\t" /* save ESP */ \ 45 "movl %[next_sp],%%esp\n\t" /* restore ESP */ \ 46 "movl $1f,%[prev_ip]\n\t" /* save EIP */ \
|
H A D | dwarf2.h | 159 movl %\reg, \offset(%esp) 164 movl \offset(%esp), %\reg
|
H A D | nops.h | 16 2: movl %esi,%esi 51 2: movl %eax,%eax
|
H A D | pci_x86.h | 179 asm volatile("movl (%1),%%eax" : "=a" (val) : "r" (pos)); mmio_config_readl() 195 asm volatile("movl %%eax,(%1)" : : "a" (val), "r" (pos) : "memory"); mmio_config_writel()
|
H A D | checksum_32.h | 74 asm volatile("movl (%1), %0 ;\n" ip_fast_csum() 85 "movl %0, %2 ;\n" ip_fast_csum()
|
H A D | checksum_64.h | 49 asm(" movl (%1), %0\n" ip_fast_csum() 60 " movl %0, %2\n" ip_fast_csum()
|
H A D | stacktrace.h | 52 #define get_bp(bp) asm("movl %%ebp, %0" : "=r" (bp) :)
|
H A D | elf.h | 231 asm("movl %%ds,%0" : "=r" (v)); (pr_reg)[23] = v; \ 232 asm("movl %%es,%0" : "=r" (v)); (pr_reg)[24] = v; \ 233 asm("movl %%fs,%0" : "=r" (v)); (pr_reg)[25] = v; \ 234 asm("movl %%gs,%0" : "=r" (v)); (pr_reg)[26] = v; \
|
H A D | archrandom.h | 73 alternative_io("movl $0, %0\n\t" \
|
H A D | irqflags.h | 143 #define GET_CR0_INTO_EAX movl %cr0, %eax
|
H A D | uaccess.h | 195 "1: movl %%eax,0(%2)\n" \ 196 "2: movl %%edx,4(%2)\n" \ 199 "4: movl %3,%0\n" \ 209 "1: movl %%eax,0(%1)\n" \ 210 "2: movl %%edx,4(%1)\n" \
|
H A D | bitops.h | 420 "movl $-1,%0\n" ffs() 461 "movl $-1,%0\n" fls()
|
H A D | fpu-internal.h | 158 "3: movl $-1,%[err]\n" \ 173 "3: movl $-1,%[err]\n" \
|
H A D | segment.h | 260 "1: movl %k0,%%" #seg " \n" \
|
H A D | string_32.h | 36 "movl %4,%%ecx\n\t" __memcpy()
|
H A D | xsave.h | 64 "3: movl $-1,%[err]\n" \
|
H A D | percpu.h | 9 #define __percpu_mov_op movl
|
H A D | apic.h | 94 alternative_io("movl %0, %P1", "xchgl %0, %P1", X86_BUG_11AP, native_apic_mem_write()
|
/linux-4.1.27/drivers/watchdog/ |
H A D | hpwdt.c | 168 "movl %esp, %ebp \n\t" 174 "movl 8(%ebp),%eax \n\t" 175 "movl 4(%eax),%ebx \n\t" 176 "movl 8(%eax),%ecx \n\t" 177 "movl 12(%eax),%edx \n\t" 178 "movl 16(%eax),%esi \n\t" 179 "movl 20(%eax),%edi \n\t" 180 "movl (%eax),%eax \n\t" 185 "movl 8(%ebp),%eax \n\t" 186 "movl %ebx,4(%eax) \n\t" 187 "movl %ecx,8(%eax) \n\t" 188 "movl %edx,12(%eax) \n\t" 189 "movl %esi,16(%eax) \n\t" 190 "movl %edi,20(%eax) \n\t" 194 "movl %ebx,(%eax) \n\t" 196 "movl %ebx,28(%eax) \n\t" 367 "movl 4(%r9),%ebx \n\t" 368 "movl 8(%r9),%ecx \n\t" 369 "movl 12(%r9),%edx \n\t" 370 "movl 16(%r9),%esi \n\t" 371 "movl 20(%r9),%edi \n\t" 372 "movl (%r9),%eax \n\t" 376 "movl %eax, (%r9) \n\t" 377 "movl %ebx, 4(%r9) \n\t" 378 "movl %ecx, 8(%r9) \n\t" 379 "movl %edx, 12(%r9) \n\t" 380 "movl %esi, 16(%r9) \n\t" 381 "movl %edi, 20(%r9) \n\t" 383 "movl %eax, 28(%r9) \n\t"
|
/linux-4.1.27/drivers/char/ |
H A D | i8k.c | 161 "movl 0(%%rax),%%edx\n\t" i8k_smm() 163 "movl 4(%%rax),%%ebx\n\t" i8k_smm() 164 "movl 8(%%rax),%%ecx\n\t" i8k_smm() 165 "movl 12(%%rax),%%edx\n\t" i8k_smm() 166 "movl 16(%%rax),%%esi\n\t" i8k_smm() 167 "movl 20(%%rax),%%edi\n\t" i8k_smm() 172 "movl %%ebx,4(%%rax)\n\t" i8k_smm() 173 "movl %%ecx,8(%%rax)\n\t" i8k_smm() 174 "movl %%edx,12(%%rax)\n\t" i8k_smm() 175 "movl %%esi,16(%%rax)\n\t" i8k_smm() 176 "movl %%edi,20(%%rax)\n\t" i8k_smm() 178 "movl %%edx,0(%%rax)\n\t" i8k_smm() 187 "movl 0(%%eax),%%edx\n\t" i8k_smm() 189 "movl 4(%%eax),%%ebx\n\t" i8k_smm() 190 "movl 8(%%eax),%%ecx\n\t" i8k_smm() 191 "movl 12(%%eax),%%edx\n\t" i8k_smm() 192 "movl 16(%%eax),%%esi\n\t" i8k_smm() 193 "movl 20(%%eax),%%edi\n\t" i8k_smm() 198 "movl %%ebx,4(%%eax)\n\t" i8k_smm() 199 "movl %%ecx,8(%%eax)\n\t" i8k_smm() 200 "movl %%edx,12(%%eax)\n\t" i8k_smm() 201 "movl %%esi,16(%%eax)\n\t" i8k_smm() 202 "movl %%edi,20(%%eax)\n\t" i8k_smm() 204 "movl %%edx,0(%%eax)\n\t" i8k_smm()
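
i8k_smm() marshals a six-word register block in and out of the SMM handler: offsets 4, 8, 12, 16, and 20 of the buffer are loaded into %ebx, %ecx, %edx, %esi, and %edi before the SMI is raised and written back afterwards, with offset 0 serving as the %eax slot (both the 64-bit and 32-bit variants appear above). A hedged sketch of that buffer layout, inferred from the offsets only (smm_regs_sketch is an invented name):

#include <stdint.h>

/* Invented sketch of the register block passed to the SMM handler;
 * field order follows the 4-byte offsets used in the asm above. */
struct smm_regs_sketch {
	uint32_t eax;	/* offset 0  */
	uint32_t ebx;	/* offset 4  */
	uint32_t ecx;	/* offset 8  */
	uint32_t edx;	/* offset 12 */
	uint32_t esi;	/* offset 16 */
	uint32_t edi;	/* offset 20 */
};
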
|
H A D | toshiba.c | 225 "movl 0(%%eax),%%edx\n\t" \ tosh_smm() 227 "movl 4(%%eax),%%ebx\n\t" \ tosh_smm() 228 "movl 8(%%eax),%%ecx\n\t" \ tosh_smm() 229 "movl 12(%%eax),%%edx\n\t" \ tosh_smm() 230 "movl 16(%%eax),%%esi\n\t" \ tosh_smm() 231 "movl 20(%%eax),%%edi\n\t" \ tosh_smm() 237 "movl %%ebx,4(%%eax)\n\t" \ tosh_smm() 238 "movl %%ecx,8(%%eax)\n\t" \ tosh_smm() 239 "movl %%edx,12(%%eax)\n\t" \ tosh_smm() 240 "movl %%esi,16(%%eax)\n\t" \ tosh_smm() 241 "movl %%edi,20(%%eax)\n\t" \ tosh_smm() 243 "movl %%edx,0(%%eax)\n\t" \ tosh_smm()
|
/linux-4.1.27/arch/x86/vdso/vdso32/ |
H A D | syscall.S | 20 movl %ecx, %ebp 22 movl %ebp, %ecx
|
H A D | sigreturn.S | 25 movl $__NR_sigreturn, %eax 36 movl $__NR_rt_sigreturn, %eax
|
H A D | sysenter.S | 39 movl %esp,%ebp
|
/linux-4.1.27/arch/x86/xen/ |
H A D | xen-asm_32.S | 44 movl PT_EAX(%esp), %eax /* Shouldn't be necessary? */ 100 movl $(__KERNEL_PERCPU), %eax 101 movl %eax, %fs 102 movl %fs:xen_vcpu, %eax 105 movl %ss:xen_vcpu, %eax 209 movl PT_CS(%esp), %ecx 224 movl 0+4(%edi), %eax /* copy EAX (just above top of frame) */ 225 movl %eax, PT_EAX(%esp)
|
/linux-4.1.27/arch/x86/crypto/sha-mb/ |
H A D | sha1_mb_mgr_submit_avx2.S | 122 movl $STS_BEING_PROCESSED, _status(job) 125 movl _len(job), DWORD_len 131 movl DWORD_len, _lens(state , lane, 4) 140 movl DWORD_tmp, _args_digest+4*32(state , lane, 4) 189 movl $STS_COMPLETED, _status(job_rax) 194 movl $0xFFFFFFFF, _lens(state, idx, 4) 200 movl 4*32(state, idx, 4), DWORD_tmp 203 movl DWORD_tmp, _result_digest+1*16(job_rax)
|
H A D | sha1_mb_mgr_flush_avx2.S | 170 movl $0xFFFFFFFF, offset(state) 214 movl $STS_COMPLETED, _status(job_rax) 220 movl $0xFFFFFFFF, _lens(state, idx, 4) 226 movl _args_digest+4*32(state, idx, 4), tmp2_w 282 movl $STS_COMPLETED, _status(job_rax) 288 movl $0xFFFFFFFF, _lens(state, idx, 4) 294 movl _args_digest+4*32(state, idx, 4), tmp2_w 297 movl tmp2_w, _result_digest+1*16(job_rax)
|
/linux-4.1.27/arch/x86/lguest/ |
H A D | head_32.S | 37 movl $LHCALL_LGUEST_INIT, %eax 38 movl $lguest_data - __PAGE_OFFSET, %ebx 42 movl $LHCALL_NEW_PGTABLE, %eax 43 movl $(initial_page_table - __PAGE_OFFSET), %ebx 47 movl $(init_thread_union+THREAD_SIZE),%esp 62 LGUEST_PATCH(cli, movl $0, lguest_data+LGUEST_DATA_irq_enabled) 63 LGUEST_PATCH(pushf, movl lguest_data+LGUEST_DATA_irq_enabled, %eax) 80 movl $X86_EFLAGS_IF, lguest_data+LGUEST_DATA_irq_enabled 107 movl $LHCALL_SEND_INTERRUPTS, %eax 121 movl %eax, lguest_data+LGUEST_DATA_irq_enabled
|
/linux-4.1.27/arch/ia64/kernel/ |
H A D | pal.S | 33 movl r2=pal_entry_point 58 movl loc2 = pal_entry_point 101 movl loc2 = pal_entry_point 148 movl loc2 = pal_entry_point 170 movl r16=PAL_PSR_BITS_TO_CLEAR 171 movl r17=PAL_PSR_BITS_TO_SET 207 movl loc2 = pal_entry_point 221 movl r16=PAL_PSR_BITS_TO_CLEAR 222 movl r17=PAL_PSR_BITS_TO_SET
|
H A D | relocate_kernel.S | 40 movl r16 = IA64_PSR_AC|IA64_PSR_BN|IA64_PSR_IC 98 movl r16=KERNEL_START 120 movl r19=PAGE_OFFSET 131 movl r16=PAGE_MASK 155 movl r14=PAGE_SIZE/8 - 1;;
|
H A D | head.S | 65 movl _tmp=(num<<61);; \ 90 movl _idx1=0x00;; \ 100 movl reg1=sal_state_for_booting_cpu;; \ 165 movl _tmp1=(num << 61);; \ 238 movl r17=KERNEL_START 244 movl r18=PAGE_KERNEL 261 movl r16=(IA64_PSR_IT|IA64_PSR_IC|IA64_PSR_DT|IA64_PSR_RT|IA64_PSR_DFH|IA64_PSR_BN \ 265 movl r17=1f 281 movl r3=ia64_ivt 284 movl r2=FPSR_DEFAULT 287 movl gp=__gp 300 movl r3=task_for_booting_cpu 303 movl r2=init_task 309 movl r2=init_task 319 movl r17=PAGE_KERNEL 361 movl r19=__phys_per_cpu_start 369 movl r20=__cpu0_per_cpu 393 (isBP) movl r2=ia64_boot_param 399 movl r14=hypervisor_setup_hooks 400 movl r15=hypervisor_type 991 movl r18=KERNEL_START 1126 movl r18=tlb_purge_done;; 1129 movl r18=ia64_do_tlb_purge;; 1133 movl r17=1f;; 1136 movl r16=SAL_PSR_BITS_TO_SET;;
|
H A D | efi_stub.S | 57 movl r16=PSR_BITS_TO_CLEAR 59 movl r17=PSR_BITS_TO_SET
|
H A D | entry.h | 66 movl r28=1f; \ 76 movl r28=1f; \
|
H A D | esi_stub.S | 74 movl r16=PSR_BITS_TO_CLEAR 76 movl r17=PSR_BITS_TO_SET
|
H A D | paravirtentry.S | 41 movl reg=targ ; \
|
H A D | minstate.h | 111 movl r11=FPSR_DEFAULT; /* L-unit */ \ 146 movl r1=__gp; /* establish kernel global pointer */ \ 228 movl r16=2f; \
|
H A D | ivt.S | 113 movl r18=PAGE_SHIFT 262 movl r30=1f // load nested fault continuation point 306 movl r30=1f // load nested fault continuation point 340 movl r17=PAGE_KERNEL 342 movl r19=(((1 << IA64_MAX_PHYS_BITS) - 1) & ~0xfff) 378 movl r17=PAGE_KERNEL 380 movl r19=(((1 << IA64_MAX_PHYS_BITS) - 1) & ~0xfff) 539 movl r30=1f // load continuation point in case of nested fault 594 movl r30=1f // load continuation point in case of nested fault 660 movl r30=1f // load continuation point in case of nested fault 747 movl r30=sys_call_table // X 783 (p7) movl r30=sys_ni_syscall // X 851 movl r3=ia64_ret_from_syscall // X 1019 movl r1=__gp // establish kernel global pointer 1026 movl r17=FPSR_DEFAULT 1530 movl r14=ia64_leave_kernel 1557 movl r15=ia64_leave_kernel 1582 movl r14=ia64_leave_kernel 1608 movl r14=ia64_leave_kernel 1643 movl r14=ia64_leave_kernel 1680 movl r15=ia64_leave_kernel
|
H A D | mca_asm.S | 99 movl r16=KERNEL_START 123 movl r19=PAGE_OFFSET 160 movl r18=ia64_reload_tr;; 169 movl r17=KERNEL_START 175 movl r18=PAGE_KERNEL 210 movl r19=PAGE_OFFSET 213 movl r20=PAGE_KERNEL 876 movl r21=PAGE_KERNEL // page properties 1050 movl r21=PAGE_KERNEL // page properties 1059 movl r17=FPSR_DEFAULT 1107 movl r15 = 1f
|
H A D | fsys.S | 196 movl r20 = fsyscall_gtod_data // load fsyscall gettimeofday data address 199 movl r29 = itc_jitter_data // itc_jitter 276 movl r2 = 1000000000 278 (p14) movl r3 = 2361183241434822607 // Prep for / 1000 hack 351 movl r17=cpu_to_node_map 393 movl r14=sys_call_table 466 movl r14=ia64_ret_from_syscall // X 469 movl r28=__kernel_syscall_via_break // X create cr.iip 497 movl r8=PSR_ONE_BITS // X
|
H A D | jprobes.S | 66 movl r16=invalidate_restore_cfm
|
H A D | paravirt.c | 382 "movl r2 = 0x2000000000000000\n" 385 "shl r3 = r2, 1\n" /* movl r3 = 0x4000000000000000 */ 387 "add r2 = r2, r3\n" /* movl r2 = 0x6000000000000000 */ 391 "shl r3 = r3, 1\n" /* movl r3 = 0x8000000000000000 */
|
H A D | entry.S | 179 movl r25=init_task 209 movl r25=PAGE_KERNEL 531 movl r16=sys_call_table 537 (p7) movl r20=sys_ni_syscall 822 movl r14=__kernel_syscall_via_epc // X 829 movl r14=__kernel_syscall_via_epc // X 1380 movl gp=__gp 1388 movl r3 = .here;; 1413 movl r2 = ftrace_stub 1414 movl r3 = ftrace_trace_function;; 1445 movl r2 = _mcount_ret_helper
|
H A D | gate.S | 28 [1:] movl reg=0; \ 266 movl r17=0x8208208208208209
|
H A D | patch.c | 19 * The 64-bit value in a "movl reg=value" is scattered between the two words of the bundle
|
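The patch.c comment above refers to the ia64 long-immediate encoding: a movl keeps the top 41 bits of the value in the L slot and scatters the rest into small fields of the X slot, which is why re-patching the value needs ia64_patch_imm64() rather than a single store. A rough sketch of the field split, assuming the usual X2 decomposition (field names per the Itanium ISA; the in-slot bit offsets the real patcher needs are deliberately left out):

        #include <stdint.h>

        struct movl_imm_fields {
                uint64_t imm41;         /* value bits 62..22, carried in the L slot */
                uint32_t imm9d;         /* value bits 15..7                         */
                uint32_t imm7b;         /* value bits  6..0                         */
                uint32_t imm5c;         /* value bits 20..16                        */
                uint32_t ic;            /* value bit  21                            */
                uint32_t i;             /* value bit  63 (sign)                     */
        };

        static struct movl_imm_fields split_movl_imm(uint64_t val)
        {
                struct movl_imm_fields f = {
                        .imm41 = (val >> 22) & ((1ULL << 41) - 1),
                        .imm9d = (uint32_t)(val >>  7) & 0x1ff,
                        .imm7b = (uint32_t) val        & 0x7f,
                        .imm5c = (uint32_t)(val >> 16) & 0x1f,
                        .ic    = (uint32_t)(val >> 21) & 1,
                        .i     = (uint32_t)(val >> 63),
                };
                return f;
        }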
H A D | module.c | 221 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, /* movl gp=TARGET_GP */ 266 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, /* movl r16=TARGET_IP */ 271 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, /* movl gp=TARGET_GP */
|
/linux-4.1.27/drivers/lguest/x86/ |
H A D | switcher_32.S | 124 movl %esp, LGUEST_PAGES_host_sp(%eax) 134 movl %eax, %edx 136 movl %edx, %esp 152 movl $(GDT_ENTRY_TSS*8), %edx 160 movl (LGUEST_PAGES_host_gdt_desc+2)(%eax), %edx 167 movl %ebx, %cr3 220 movl $(LGUEST_DS), %eax; \ 221 movl %eax, %ds; \ 230 movl %esp, %eax; \ 235 movl LGUEST_PAGES_regs_trapnum(%eax), %ebx; \ 240 movl LGUEST_PAGES_host_cr3(%eax), %edx; \ 241 movl %edx, %cr3; \ 250 movl LGUEST_PAGES_host_sp(%eax), %esp; \ 252 movl $(GDT_ENTRY_TSS*8), %edx; \ 288 movl (LGUEST_PAGES_host_idt_desc+2)(%eax), %edx 299 movl 4(%eax), %eax
|
/linux-4.1.27/arch/ia64/include/asm/ |
H A D | patch.h | 18 extern void ia64_patch_imm64 (u64 insn_addr, u64 val); /* patch "movl" w/abs. value*/
|
H A D | asmmacro.h | 77 [1:](pr)movl reg = obj; \
|
H A D | mca_asm.h | 200 movl temp2 = start_addr; \ 203 movl gp = __gp \
|
H A D | paravirt_privop.h | 211 "movl r2 = %[op_addr]\n"/* get function pointer address */ \
|
/linux-4.1.27/arch/ia64/lib/ |
H A D | carta_random.S | 22 movl a = (16807 << 16) | 16807
|
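The lone hit in carta_random.S loads the Park-Miller multiplier 16807 into both halves of a register, driving D. Carta's divide-free evaluation of the "minimal standard" Lehmer generator x' = 16807 * x mod (2^31 - 1). A plain-C sketch of the same reduction, illustrative rather than the ia64 register choreography, assuming a seed in 1..2^31-2:

        #include <stdint.h>

        static uint32_t carta_random32(uint32_t x)
        {
                uint64_t p  = 16807ULL * x;             /* at most ~46 bits          */
                uint32_t lo = (uint32_t)p & 0x7fffffff; /* low 31 bits               */
                uint32_t hi = (uint32_t)(p >> 31);      /* remaining high bits       */
                uint32_t r  = lo + hi;                  /* 2^31 == 1 (mod 2^31 - 1)  */

                if (r & 0x80000000)                     /* fold the carry back in    */
                        r = (r & 0x7fffffff) + 1;
                return r;
        }

Starting from any in-range seed, repeated calls cycle through the generator's full period of 2^31 - 2.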
/linux-4.1.27/tools/perf/arch/x86/util/ |
H A D | header.c | 14 "movl %%ebx, %%esi\n\t.byte 0x5b" cpuid()
|
/linux-4.1.27/arch/x86/boot/ |
H A D | boot.h | 130 asm volatile("movl %%fs:%1,%0" : "=r" (v) : "m" (*(u32 *)addr)); rdfs32() 144 asm volatile("movl %1,%%fs:%0" : "+m" (*(u32 *)addr) : "ri" (v)); wrfs32() 162 asm volatile("movl %%gs:%1,%0" : "=r" (v) : "m" (*(u32 *)addr)); rdgs32() 176 asm volatile("movl %1,%%gs:%0" : "+m" (*(u32 *)addr) : "ri" (v)); wrgs32()
|
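The boot.h hits are the segment-override accessors the 16/32-bit setup code uses to read and write %fs- and %gs-relative memory without reloading %ds. Reassembled into complete functions they look like the sketch below; addr_t stands in for the boot code's 32-bit address type, and only the %fs pair is shown, so this compiles for the 32-bit boot environment only:

        #include <stdint.h>

        typedef uint32_t u32;
        typedef u32 addr_t;             /* boot-code addresses fit in 32 bits */

        static inline u32 rdfs32(addr_t addr)
        {
                u32 v;
                asm volatile("movl %%fs:%1,%0" : "=r" (v) : "m" (*(u32 *)addr));
                return v;
        }

        static inline void wrfs32(u32 v, addr_t addr)
        {
                asm volatile("movl %1,%%fs:%0" : "+m" (*(u32 *)addr) : "ri" (v));
        }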
H A D | cpuflags.c | 75 asm volatile(".ifnc %%ebx,%3 ; movl %%ebx,%3 ; .endif \n\t" cpuid()
|
H A D | header.S | 513 movl $setup_corrupt, %eax
|
/linux-4.1.27/arch/m68k/kernel/ |
H A D | sun3-head.S | 72 movl #(PAGESIZE),%d0
|
H A D | head.S | 3159 movl %pc@(L(iobase)),%a1 3170 movl %pc@(L(iobase)),%a1
|
/linux-4.1.27/arch/ia64/sn/kernel/sn2/ |
H A D | ptc_deadlock.S | 38 movl mask=WRITECOUNTMASK
|
/linux-4.1.27/arch/x86/mm/ |
H A D | extable.c | 29 "movl %0, %%esp\n\t" fixup_exception()
|
/linux-4.1.27/arch/m68k/lib/ |
H A D | udivsi3.S | 146 movl d2,d3 | subtract b from p, store in tmp. 150 movl d3,d2 | and store tmp in p.
|
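The udivsi3.S comments ("subtract b from p, store in tmp ... and store tmp in p") are the commit step of a shift-and-subtract divide: each iteration trial-subtracts the divisor from the partial remainder and keeps the result only when it does not underflow. A plain-C sketch of that loop, illustrative rather than the m68k register allocation, assuming d != 0:

        #include <stdint.h>

        static uint32_t udiv32(uint32_t n, uint32_t d, uint32_t *rem)
        {
                uint32_t q = 0, p = 0;  /* quotient and partial remainder */
                int i;

                for (i = 31; i >= 0; i--) {
                        p = (p << 1) | ((n >> i) & 1);  /* bring down next bit     */
                        if (p >= d) {                   /* trial subtract succeeds */
                                p -= d;                 /* commit new remainder    */
                                q |= 1U << i;
                        }
                }
                if (rem)
                        *rem = p;
                return q;
        }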
/linux-4.1.27/arch/ia64/include/asm/native/ |
H A D | inst.h | 40 movl clob = PARAVIRT_POISON; \
|
/linux-4.1.27/drivers/pnp/pnpbios/ |
H A D | bioscalls.c | 118 "movl %%esp, pnp_bios_fault_esp\n\t" call_pnp_bios() 119 "movl $1f, pnp_bios_fault_eip\n\t" call_pnp_bios()
|
/linux-4.1.27/arch/x86/kernel/kprobes/ |
H A D | core.c | 693 " movl %esp, %eax\n" kretprobe_trampoline_holder() 696 " movl 56(%esp), %edx\n" kretprobe_trampoline_holder() 697 " movl %edx, 52(%esp)\n" kretprobe_trampoline_holder() 699 " movl %eax, 56(%esp)\n" kretprobe_trampoline_holder()
|
H A D | opt.c | 117 " movl %esp, %edx\n"
|
/linux-4.1.27/drivers/char/mwave/ |
H A D | smapi.c | 78 "movl %7,%%ebx\n\t" smapi_request() 81 "movl %8,%%edi\n\t" smapi_request()
|
/linux-4.1.27/drivers/input/misc/ |
H A D | wistron_btns.c | 79 "movl %7, %%ebp;" call_bios()
|
/linux-4.1.27/arch/x86/kernel/cpu/ |
H A D | common.c | 216 "movl %0, %1 \n\t" flag_is_changeable_p()
|
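The common.c hit sits inside flag_is_changeable_p(), the classic probe that checks whether an EFLAGS bit can be toggled; with the ID bit it is how 32-bit boot code detects CPUID support. A sketch along those lines, 32-bit only, with constraints that may differ slightly from the kernel's:

        static int flag_is_changeable(unsigned long flag)
        {
                unsigned long f1, f2;

                asm volatile("pushfl\n\t"          /* save original EFLAGS     */
                             "pushfl\n\t"
                             "popl %0\n\t"         /* f1 = EFLAGS              */
                             "movl %0, %1\n\t"     /* keep a copy in f2        */
                             "xorl %2, %0\n\t"     /* flip the bit under test  */
                             "pushl %0\n\t"
                             "popfl\n\t"           /* try to write it back     */
                             "pushfl\n\t"
                             "popl %0\n\t"         /* re-read EFLAGS           */
                             "popfl"               /* restore the original     */
                             : "=&r" (f1), "=&r" (f2)
                             : "ir" (flag));

                return ((f1 ^ f2) & flag) != 0;
        }

Calling it with 0x200000 (the EFLAGS ID bit) and getting a non-zero result means CPUID is available.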
/linux-4.1.27/tools/perf/util/ |
H A D | annotate.c | 411 { .name = "movl", .ops = &mov_ops, },
|
/linux-4.1.27/arch/x86/kvm/ |
H A D | x86.c | 1319 " movl $0, %[faulted]\n" kvm_write_tsc() 1322 "4: movl $1, %[faulted]\n" kvm_write_tsc()
|