/src/tests/lib/csu/arch/powerpc/ |
h_initfini_align.S | 14 bnelr %cr0
|
/src/sys/external/mit/xen-include-public/dist/xen/include/public/hvm/ |
hvm_vcpu.h |
    40 uint32_t cr0; member in struct:vcpu_hvm_x86_32
    103 uint64_t cr0; member in struct:vcpu_hvm_x86_64
|
/src/sys/arch/i386/i386/ |
mptramp.S |
    123 movl %cr0,%eax
    125 movl %eax,%cr0
    182 movl %cr0,%eax
    184 movl %eax,%cr0
    237 movl %eax,%cr0
|
mtrr_k6.c |
    105 uint32_t origcr0, cr0; local in function:k6_mtrr_reload
    110 origcr0 = cr0 = rcr0();
    111 cr0 |= CR0_CD;
    112 lcr0(cr0);
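The k6_mtrr_reload() hits above follow the usual save/modify/restore idiom for CR0: remember the old value, set CD so the cache stops filling, reprogram the range registers, then put CR0 back. A minimal sketch of that idiom, assuming NetBSD's x86 rcr0()/lcr0()/wbinvd() accessors and the CR0_CD flag from the machine headers (the real function's MSR writes and interrupt handling are omitted):

    #include <sys/types.h>
    #include <machine/cpufunc.h>    /* rcr0(), lcr0(), wbinvd() -- assumed location */
    #include <machine/specialreg.h> /* CR0_CD -- assumed location */

    static void
    mtrr_reload_sketch(void)
    {
            uint32_t origcr0, cr0;

            origcr0 = cr0 = rcr0();         /* remember the caller's CR0 */
            cr0 |= CR0_CD;                  /* CD=1: no new cache fills */
            lcr0(cr0);
            wbinvd();                       /* flush and invalidate the caches */

            /* ... rewrite the K6 MTRR MSRs here ... */

            lcr0(origcr0);                  /* restore the original CR0 */
    }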
|
cpufunc.S |
    123 /* save CR0, and disable WP */
    124 movl %cr0,%ecx
    127 movl %ecx,%cr0
    137 /* restore CR0 */
    139 movl %ecx,%cr0
    346 movl %cr0, %eax
    350 movl %eax, %cr0
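The cpufunc.S hits around lines 123-139 bracket a store with "save CR0, and disable WP" / "restore CR0". In C the same trick looks like the sketch below: clear CR0_WP so supervisor-mode stores ignore read-only page protection, do the write, then restore CR0. The helper name is hypothetical, rcr0()/lcr0() and CR0_WP are assumed from the machine headers, and the real code also keeps interrupts off around the window:

    #include <sys/types.h>
    #include <machine/cpufunc.h>    /* rcr0(), lcr0() -- assumed location */
    #include <machine/specialreg.h> /* CR0_WP -- assumed location */

    /* Hypothetical helper: patch one byte of otherwise read-only kernel text. */
    static void
    patch_rodata_byte(uint8_t *dst, uint8_t val)
    {
            u_long cr0;

            cr0 = rcr0();
            lcr0(cr0 & ~CR0_WP);    /* WP=0: CPL0 writes ignore R/O PTEs */
            *dst = val;
            lcr0(cr0);              /* restore write protection */
    }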
|
locore.S |
    567 /* Disable Paging in CR0 */
    568 movl %cr0, %eax
    570 movl %eax, %cr0
    784 /* enable caching in CR0 */
    785 movl %cr0,%eax
    787 movl %eax,%cr0
    1048 movl %cr0,%eax
    1050 movl %eax,%cr0
|
/src/sys/arch/amd64/acpi/ |
acpi_wakecode.S |
    147 mov %cr0,%eax
    149 mov %eax,%cr0
    203 movl %cr0,%eax
    205 movl %eax,%cr0
|
acpi_wakeup_low.S |
    87 movq %rax,%cr0
    125 movq %cr0,%rax
|
/src/sys/arch/i386/acpi/ |
acpi_wakecode.S |
    139 mov %cr0,%eax
    141 mov %eax,%cr0
    174 movl %cr0,%eax
    176 movl %eax,%cr0
|
acpi_wakeup_low.S | 113 movl %cr0,%eax
|
/src/sys/arch/x86/x86/ |
cpu_rng.c |
    204 uint32_t cr0, status, nbytes; local in function:cpu_rng_via
    208 * requires the CR0 TS and CR0 EM bits to be clear. We disable
    210 * interrupt handler changing CR0 while we work -- although
    217 cr0 = rcr0();
    218 lcr0(cr0 & ~(CR0_EM|CR0_TS));
    226 /* Restore CR0 and interrupts. */
    227 lcr0(cr0);
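The cpu_rng_via() hits above show the guard that VIA's xstore RNG instruction needs: CR0.TS and CR0.EM must be clear or the instruction faults, and interrupts are blocked so nothing (for example lazy-FPU handling) flips TS back while the window is open. A condensed sketch of that pattern, assuming the x86 rcr0()/lcr0() and x86_disable_intr()/x86_enable_intr() helpers; the xstore inline assembly itself is elided:

    #include <sys/types.h>
    #include <machine/cpufunc.h>    /* rcr0(), lcr0(), x86_*_intr() -- assumed */
    #include <machine/specialreg.h> /* CR0_EM, CR0_TS -- assumed location */

    static void
    via_rng_window_sketch(void)
    {
            u_long cr0;

            x86_disable_intr();             /* keep interrupt code off CR0 */
            cr0 = rcr0();
            lcr0(cr0 & ~(CR0_EM|CR0_TS));   /* xstore faults if TS or EM is set */

            /* ... issue the VIA xstore instruction here ... */

            lcr0(cr0);                      /* restore CR0 (TS may come back) */
            x86_enable_intr();
    }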
|
mtrr_i686.c |
    168 /* XXX cr0 is 64-bit on amd64 too, but the upper bits are
    172 uint32_t cr0; local in function:i686_mtrr_reload
    195 * 4. Enter the no-fill cache mode (set the CD flag in CR0 to 1 and
    199 origcr0 = cr0 = rcr0();
    200 cr0 |= CR0_CD;
    201 cr0 &= ~CR0_NW;
    202 lcr0(cr0);
    268 * NW flags in CR0 to 0)
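The i686_mtrr_reload() hits quote step 4 of the Intel-documented MTRR update procedure; its CR0 handling differs from the K6 sketch above only in that the NW flag is cleared as well as CD set, and both are restored afterwards. Just that step, under the same assumptions as the earlier fragment:

            origcr0 = cr0 = rcr0();
            cr0 |= CR0_CD;          /* CD=1: enter the no-fill cache mode */
            cr0 &= ~CR0_NW;         /* NW=0, as the procedure requires */
            lcr0(cr0);
            wbinvd();               /* flush caches before touching the MTRRs */

            /* ... disable, rewrite and re-enable the MTRRs ... */

            lcr0(origcr0);          /* restore the CD and NW flags in CR0 */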
|
via_padlock.c |
    337 unsigned int cr0; local in function:via_padlock_cbc
    342 cr0 = rcr0(); /* Permit access to SIMD/FPU path */
    343 lcr0(cr0 & ~(CR0_EM|CR0_TS));
    351 lcr0(cr0);
|
/src/sys/arch/i386/stand/lib/ |
realprot.S |
    150 movl %cr0, %eax
    152 movl %eax, %cr0 /* Enter 'protected mode' */
    230 movl %cr0, %eax
    232 movl %eax, %cr0 /* Disable protected mode */
|
/src/sys/arch/amd64/amd64/ |
mptramp.S |
    122 movl %cr0,%eax
    124 movl %eax,%cr0
    177 movl %cr0,%eax
    179 movl %eax,%cr0
    247 movq %rax,%cr0
|
cpufunc.S |
    231 /* save CR0, and disable WP */
    232 movq %cr0,%rcx
    235 movq %rcx,%cr0
    242 /* restore CR0 */
    244 movq %rcx,%cr0
    446 movq %cr0, %rax
    448 movq %rax, %cr0
|
/src/sys/arch/i386/bioscall/ |
biostramp.S |
    43 * addresses, clearing PG in CR0, and zeroing CR3 (PDBR).
    51 * 4) clear PE in CR0, execute FAR jump to load CS.
    169 movl %cr0,%eax
    171 movl %eax,%cr0
    188 mov %cr0,%eax
    191 mov %eax,%cr0
    267 mov %cr0,%eax
    270 mov %eax,%cr0
    287 movl %cr0,%eax
    289 movl %eax,%cr0
    [all...]
|
/src/sys/arch/mvmeppc/mvmeppc/ |
locore.S |
    132 cmpwi %cr0,%r9,1
    147 cmpwi %cr0,%r9,4 /* check for 604 */
|
/src/sys/arch/i386/stand/efiboot/bootia32/ |
startprog32.S |
    214 /* Disable Paging in CR0 */
    215 movl %cr0, %eax
    217 movl %eax, %cr0
|
/src/sys/arch/i386/stand/efiboot/bootx64/ |
startprog64.S |
    234 /* Disable Paging in CR0 */
    235 movl %cr0, %eax
    237 movl %eax, %cr0
|
/src/sys/arch/i386/stand/dosboot/ |
start_dos.S |
    141 movl %cr0, %eax
    211 # set the PE bit of CR0
    212 movl %cr0, %eax
    214 movl %eax, %cr0
    266 # clear the PE bit of CR0
    267 movl %cr0, %eax
    269 movl %eax, %cr0
|
/src/sys/arch/arm/sa11x0/ |
sa11x0_com.c |
    842 u_int cr0; local in function:cflag2cr0
    844 cr0 = (cflag & PARENB) ? CR0_PE : 0;
    845 cr0 |= (cflag & PARODD) ? 0 : CR0_OES;
    846 cr0 |= (cflag & CSTOPB) ? CR0_SBS : 0;
    847 cr0 |= ((cflag & CSIZE) == CS8) ? CR0_DSS : 0;
    849 return cr0;
    858 u_int cr0; local in function:sacomparam
    889 cr0 = cflag2cr0(t->c_cflag);
    894 sc->sc_cr0 = cr0;
    1429 int brd, cr0; local in function:sacominit
    [all...]
|
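The sa11x0_com.c hits above contain the whole of cflag2cr0(), the termios-to-UART-CR0 translation, restated below with comments; the CR0_* bit macros come from the driver's register header and are taken as given here. Note that a plain 8N1 cflag yields CR0_OES | CR0_DSS: the even-parity select bit is still set, but it is meaningless while CR0_PE is clear.

    #include <sys/types.h>
    #include <sys/termios.h>

    /* Sketch of the driver's cflag2cr0(); CR0_* are the SA-11x0 UART CR0 bits. */
    static u_int
    cflag2cr0(tcflag_t cflag)
    {
            u_int cr0;

            cr0  = (cflag & PARENB) ? CR0_PE : 0;           /* parity enable */
            cr0 |= (cflag & PARODD) ? 0 : CR0_OES;          /* even parity select */
            cr0 |= (cflag & CSTOPB) ? CR0_SBS : 0;          /* two stop bits */
            cr0 |= ((cflag & CSIZE) == CS8) ? CR0_DSS : 0;  /* 8-bit characters */
            return cr0;
    }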
/src/sys/external/mit/xen-include-public/dist/xen/include/public/ |
vm_event.h | 184 uint64_t cr0; member in struct:vm_event_regs_x86
|
/src/sys/arch/powerpc/booke/ |
trap_subr.S |
    734 beqlr %cr0 /* yes, return to fallback to trap */
    741 beqlr %cr0 /* yes, return to fallback to trap */
    749 beqlr %cr0 /* no, return to fallback to trap */
    784 beq+ %cr0, 2f /* skip munging mas2 */
    949 blt %cr0, 1f
|
/src/sys/external/mit/xen-include-public/dist/xen/include/public/arch-x86/hvm/ |
save.h |
    78 uint64_t cr0; member in struct:hvm_hw_cpu
    192 uint64_t cr0; member in struct:hvm_hw_cpu_compat
|