/src/lib/libc/arch/hppa/gen/ |
fpgetmask.c |
    20  uint64_t fpsr;    local in function:fpgetmask
    22  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
    23  return ((fp_except)(fpsr >> 32) & 0x1f);
|
fpgetround.c |
    20  uint64_t fpsr;    local in function:fpgetround
    22  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
    23  return ((fp_rnd)(fpsr >> 41) & 0x3);
|
fpgetsticky.c |
    20  uint64_t fpsr;    local in function:fpgetsticky
    22  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
    23  return ((fp_except)(fpsr >> 59) & 0x1f);
|
flt_rounds.c |
    27  uint64_t fpsr;    local in function:__flt_rounds
    29  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
    30  return map[(unsigned int)(fpsr >> 41) & 0x03];
|
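The four hppa readers above share one pattern: "fstd %fr0" spills the 64-bit FP status doubleword to a stack slot, and the field of interest (enable mask, rounding mode, or sticky flags) is extracted from the upper word. A minimal sketch of that pattern, assuming an hppa GCC target; the function name and the fp_except typedef stand in for the real <ieeefp.h> declarations:

    #include <stdint.h>

    typedef int fp_except;      /* assumption: stands in for the <ieeefp.h> type */

    fp_except
    my_fpgetmask(void)          /* hypothetical name */
    {
        uint64_t fpsr;

        /* spill %fr0 (the FP status doubleword) to memory */
        __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
        /* trap-enable bits are the low five bits of the upper word */
        return ((fp_except)(fpsr >> 32) & 0x1f);
    }

The rounding-mode and sticky-flag readers differ only in the shift amount (41 and 59) and the width of the final mask.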
fpsetmask.c |
    20  uint64_t fpsr;    local in function:fpsetmask
    23  __asm volatile("fstd %%fr0,0(%1)" : "=m"(fpsr) : "r"(&fpsr) : "memory");
    24  old = (fp_except)(fpsr >> 32) & 0x1f;
    25  fpsr = (fpsr & 0xffffffe000000000LL) | ((uint64_t)(mask & 0x1f) << 32);
    26  __asm volatile("fldd 0(%0),%%fr0" : : "r"(&fpsr) : "memory");
|
fpsetround.c |
    20  uint64_t fpsr;    local in function:fpsetround
    23  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr) : "memory");
    24  old = (fp_rnd)(fpsr >> 41) & 0x03;
    25  fpsr = (fpsr & 0xfffff9ff00000000LL) |
    27  __asm volatile("fldd 0(%0),%%fr0" : : "r" (&fpsr) : "memory");
|
fpsetsticky.c |
    20  uint64_t fpsr;    local in function:fpsetsticky
    23  __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr) : "memory");
    24  old = (fp_except)(fpsr >> 59) & 0x1f;
    25  fpsr = (fpsr & 0x07ffffff00000000LL) | ((uint64_t)(mask & 0x1f) << 59);
    26  __asm volatile("fldd 0(%0),%%fr0" : : "r" (&fpsr) : "memory");
|
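The three hppa writers above are the read-modify-write counterpart: spill %fr0, splice the new field into the upper word, then reload %fr0 with fldd. A sketch of fpsetmask-style code under the same assumptions as before (hypothetical name, placeholder typedef), with the constants taken from the fpsetmask.c hits:

    #include <stdint.h>

    typedef int fp_except;      /* assumption: stands in for the <ieeefp.h> type */

    fp_except
    my_fpsetmask(fp_except mask)    /* hypothetical name */
    {
        uint64_t fpsr;
        fp_except old;

        __asm volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr) : "memory");
        old = (fp_except)(fpsr >> 32) & 0x1f;
        /* splice the new enable bits into the status word */
        fpsr = (fpsr & 0xffffffe000000000LL) | ((uint64_t)(mask & 0x1f) << 32);
        __asm volatile("fldd 0(%0),%%fr0" : : "r" (&fpsr) : "memory");
        return old;
    }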
/src/lib/libc/arch/ia64/gen/ |
fpgetmask.c |
    37  uint64_t fpsr;    local in function:fpgetmask
    39  __asm __volatile("mov %0=ar.fpsr" : "=r" (fpsr));
    40  return (~fpsr & 0x3d);
|
fpgetround.c |
    35  uint64_t fpsr;    local in function:fpgetround
    37  __asm __volatile("mov %0=ar.fpsr" : "=r"(fpsr));
    38  return ((fp_rnd)((fpsr >> 10) & 3));
|
fpsetmask.c |
    36  int64_t fpsr;    local in function:fpsetmask
    39  __asm __volatile("mov %0=ar.fpsr" : "=r" (fpsr));
    40  oldmask = ~fpsr & 0x3d;
    41  fpsr = (fpsr & ~0x3d) | (~mask & 0x3d);
    42  __asm __volatile("mov ar.fpsr=%0" :: "r" (fpsr));
|
fpsetround.c |
    35  uint64_t fpsr;    local in function:fpsetround
    38  __asm __volatile("mov %0=ar.fpsr" : "=r"(fpsr));
    39  prev = (fp_rnd)((fpsr >> 10) & 3);
    40  fpsr = (fpsr & ~0xC00ULL) | ((unsigned int)rnd << 10);
    41  __asm __volatile("mov ar.fpsr=%0" :: "r"(fpsr));
|
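On ia64 the same jobs collapse to a pair of "mov" instructions, because ar.fpsr is an ordinary application register: the getters copy it into a GPR, the setters write the edited value back. Note from the fpgetmask/fpsetmask hits that the low six bits are trap-disable bits, hence the inversions. A sketch of the fpsetround-style sequence, with a hypothetical name and a placeholder fp_rnd typedef:

    #include <stdint.h>

    typedef int fp_rnd;         /* assumption: stands in for the <ieeefp.h> type */

    fp_rnd
    my_fpsetround(fp_rnd rnd)   /* hypothetical name */
    {
        uint64_t fpsr;
        fp_rnd prev;

        __asm __volatile("mov %0=ar.fpsr" : "=r" (fpsr));
        prev = (fp_rnd)((fpsr >> 10) & 3);
        /* rounding control for status field 0 lives in bits 10-11 */
        fpsr = (fpsr & ~0xC00ULL) | ((unsigned int)rnd << 10);
        __asm __volatile("mov ar.fpsr=%0" : : "r" (fpsr));
        return prev;
    }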
/src/lib/libc/arch/m68k/hardfloat/ |
fpgetsticky.c |
    42  int fpsr;    local in function:__weak_alias
    44  __asm("fmovel %%fpsr,%0" : "=d"(fpsr));
    46  return __SHIFTOUT(fpsr, FPSR_AEX);
|
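The m68k reader above moves the 68881/68882 FPSR into a data register with fmove.l and extracts the accrued-exception byte; __SHIFTOUT() is NetBSD's generic field-extraction macro and FPSR_AEX its mask (the "local in function:__weak_alias" tag is just the indexer tripping over the weak-alias macro). A sketch with the macro open-coded, assuming the accrued flags occupy FPSR bits 3-7 as on the MC68881:

    #include <stdint.h>

    typedef int fp_except;      /* assumption: stands in for the <ieeefp.h> type */

    fp_except
    my_fpgetsticky(void)        /* hypothetical name */
    {
        uint32_t fpsr;

        __asm("fmovel %%fpsr,%0" : "=d" (fpsr));
        /* accrued exceptions: IOP, OVFL, UNFL, DZ, INEX in bits 7-3 */
        return (fp_except)((fpsr & 0x000000f8) >> 3);
    }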
/src/lib/libm/arch/ia64/ |
fenv.c |
    85  fenv_t fpsr;    local in function:feupdateenv
    87  __stfpsr(&fpsr);
    89  feraiseexcept((fpsr >> _FPUSW_SHIFT) & FE_ALL_EXCEPT);
|
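The libm hits above show the feupdateenv() idiom: capture the currently raised flags from ar.fpsr before switching environments, then re-raise them afterwards. The sketch below reconstructs only that idiom; the line between the two hits is not shown in the listing, so installing the environment with fesetenv(), the shift value of 13 for _FPUSW_SHIFT, and the helper names are all assumptions:

    #include <fenv.h>
    #include <stdint.h>

    static inline uint64_t
    my_read_fpsr(void)                  /* assumption: stands in for __stfpsr() */
    {
        uint64_t fpsr;

        __asm __volatile("mov %0=ar.fpsr" : "=r" (fpsr));
        return fpsr;
    }

    int
    my_feupdateenv(const fenv_t *envp)  /* hypothetical name */
    {
        uint64_t fpsr = my_read_fpsr();

        fesetenv(envp);                 /* assumption: the elided line installs *envp */
        /* re-raise whatever was pending before the switch; the sf0 status
         * flags sit 13 bits up in ar.fpsr (assumed _FPUSW_SHIFT value) */
        feraiseexcept((int)((fpsr >> 13) & FE_ALL_EXCEPT));
        return 0;
    }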
/src/sys/arch/aarch64/include/ |
reg.h | 55 uint32_t fpsr; member in struct:fpreg
|
/src/lib/libm/arch/aarch64/ |
fenv.c |
   109  unsigned int fpsr = reg_fpsr_read();    local in function:feraiseexcept
   111  fpsr |= __SHIFTIN(excepts, FPSR_CSUM);
   112  reg_fpsr_write(fpsr);
   131  unsigned int fpsr = reg_fpsr_read();    local in function:fesetexceptflag
   132  fpsr &= ~__SHIFTIN(excepts, FPSR_CSUM);
   133  fpsr |= __SHIFTIN((*flagp & excepts), FPSR_CSUM);
   134  reg_fpsr_write(fpsr);
|
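On aarch64 the FPSR exception flags are cumulative status bits, so feraiseexcept() above simply ORs them in and fesetexceptflag() clears and rewrites them; no trap fires unless the matching FPCR enable bit is set. reg_fpsr_read()/reg_fpsr_write() are NetBSD's accessors; a sketch of equivalent raw accessors (hypothetical names) is:

    #include <stdint.h>

    static inline unsigned int
    my_fpsr_read(void)                  /* assumption: stands in for reg_fpsr_read() */
    {
        uint64_t fpsr;

        __asm __volatile("mrs %0, fpsr" : "=r" (fpsr));
        return (unsigned int)fpsr;
    }

    static inline void
    my_fpsr_write(unsigned int fpsr)    /* assumption: stands in for reg_fpsr_write() */
    {
        __asm __volatile("msr fpsr, %0" : : "r" ((uint64_t)fpsr));
    }

With these, the feraiseexcept() hit reduces to a read, an OR of the exception bits shifted under the FPSR_CSUM mask by __SHIFTIN(), and a write back.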
/src/sys/arch/m68k/fpe/ |
fpu_fscale.c |
    60  uint32_t buf[3], fpsr;    local in function:fpu_emul_fscale
    70  fpsr = fe->fe_fpsr & ~FPSR_EXCP & ~FPSR_CCB;
    72  printf("fpu_emul_fscale: FPSR = %08x, FPCR = %08x\n", fpsr, fe->fe_fpcr);
   213  fpsr |= FPSR_SNAN;
   234  fpsr |= FPSR_INEX2;
   277  fpsr |= FPSR_UNFL;
   279  fpsr |= FPSR_ZERO;
   297  fpsr |= FPSR_OVFL | FPSR_INF;
   310  fpsr |= FPSR_UNFL [all...] |
fpu_log.c |
   481  uint32_t fpsr;    local in function:fpu_log10
   483  fpsr = fe->fe_fpsr & ~FPSR_EXCP; /* clear all exceptions */
   488  fpsr |= FPSR_OPERR;
   501  fpsr |= FPSR_DZ;
   503  fpsr |= FPSR_SNAN;
   509  fe->fe_fpsr = fpsr;
   518  uint32_t fpsr;    local in function:fpu_log2
   520  fpsr = fe->fe_fpsr & ~FPSR_EXCP; /* clear all exceptions */
   525  fpsr |= FPSR_OPERR;
   546  fpsr |= FPSR_DZ
   562  uint32_t fpsr;    local in function:fpu_logn [all...] |
fpu_emulate.c |
   102  DPRINTF(("%s: ENTERING: FPSR=%08x, FPCR=%08x\n",
   242  DPRINTF(("%s: EXITING: w/FPSR=%08x, FPCR=%08x\n", __func__,
   254  uint32_t fpsr;    local in function:fpu_upd_excp
   257  fpsr = fe->fe_fpsr;
   260  * update fpsr accrued exception bits; each insn doesn't have to
   263  if (fpsr & (FPSR_BSUN | FPSR_SNAN | FPSR_OPERR)) {
   264  fpsr |= FPSR_AIOP;
   266  if (fpsr & FPSR_OVFL) {
   267  fpsr |= FPSR_AOVFL;
   269  if ((fpsr & FPSR_UNFL) && (fpsr & FPSR_INEX2))
   288  uint32_t fpsr;    local in function:fpu_upd_fpsr
   964  int fpsr;    local in function:test_cc [all...] |
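The fpu_upd_excp() hits in fpu_emulate.c show how the software FPU folds the per-instruction exception byte into the sticky accrued-exception byte after each emulated instruction. Below is a sketch of just that folding, limited to the rules visible above; the FPSR_* values follow the MC68881/882 FPSR layout, and FPSR_AUNFL plus the body of the underflow branch (elided in the listing) are assumptions:

    #include <stdint.h>

    /* MC68881/882 FPSR bits (exception status byte, accrued exception byte) */
    #define FPSR_BSUN   0x00008000
    #define FPSR_SNAN   0x00004000
    #define FPSR_OPERR  0x00002000
    #define FPSR_OVFL   0x00001000
    #define FPSR_UNFL   0x00000800
    #define FPSR_INEX2  0x00000200
    #define FPSR_AIOP   0x00000080
    #define FPSR_AOVFL  0x00000040
    #define FPSR_AUNFL  0x00000020      /* assumption: accrued-underflow bit */

    static uint32_t
    accrue_exceptions(uint32_t fpsr)    /* hypothetical helper */
    {
        /* invalid-operation accrues from BSUN, SNAN or OPERR */
        if (fpsr & (FPSR_BSUN | FPSR_SNAN | FPSR_OPERR))
            fpsr |= FPSR_AIOP;
        /* overflow accrues directly */
        if (fpsr & FPSR_OVFL)
            fpsr |= FPSR_AOVFL;
        /* underflow accrues only if the result was also inexact */
        if ((fpsr & FPSR_UNFL) && (fpsr & FPSR_INEX2))
            fpsr |= FPSR_AUNFL;         /* assumption: the elided branch sets this */
        return fpsr;
    }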
/src/sys/compat/linux/arch/aarch64/ |
linux_machdep.h | 46 uint32_t fpsr; member in struct:fpsimd_context
|
/src/sys/arch/m68k/include/ |
fenv.h |
    41  /* Exception bits, from FPSR */
    71  uint32_t fpsr;    member in struct:__anon8efedb950108
    83  __asm__ __volatile__ ("fmove%.l %/fpsr,%0" : "=dm" (__fpsr))
    85  __asm__ __volatile__ ("fmove%.l %0,%/fpsr" : : "dm" (__fpsr))
   106  __asm__ __volatile__ ("fmovem%.l %/fpcr/%/fpsr/%/fpiar,%0" : "=m" (__envp))
   109  __asm__ __volatile__ ("fmovem%.l %0,%/fpcr/%/fpsr/%/fpiar" : : "m" (__envp))
   231  __fpsr = __envp->fpsr & ~FE_ALL_EXCEPT;
   249  __tenv.fpsr |= __envp->fpsr & FE_ALL_EXCEPT;
|
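The m68k <fenv.h> hits above show that the floating-point environment is the {fpcr, fpsr, fpiar} register triple, saved and restored in a single fmovem.l, while the single-register fmove.l forms handle FPSR alone; lines 231 and 249 then mask or merge the FE_ALL_EXCEPT bits inside the saved fpsr word. A sketch of how the pieces fit, using a hypothetical struct and function names in place of the header's anonymous types:

    #include <stdint.h>

    typedef struct {
        uint32_t fpcr;      /* control register */
        uint32_t fpsr;      /* status register */
        uint32_t fpiar;     /* instruction address register */
    } my_fenv_t;            /* hypothetical; mirrors the anonymous struct above */

    static inline void
    my_fegetenv(my_fenv_t *envp)
    {
        __asm__ __volatile__ ("fmovem%.l %/fpcr/%/fpsr/%/fpiar,%0" : "=m" (*envp));
    }

    static inline void
    my_fesetenv(const my_fenv_t *envp)
    {
        __asm__ __volatile__ ("fmovem%.l %0,%/fpcr/%/fpsr/%/fpiar" : : "m" (*envp));
    }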
/src/sys/arch/ia64/include/ |
_regset.h | 68 unsigned long fpsr; member in struct:_special
|