#if CPUFAM_X86
pushreg ebx
pushreg edi
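+ // After the two pushes above, the return address is at [SP + 8] and the
+ // three arguments are at [SP + 12], [SP + 16], and [SP + 20].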
- mov edi, [esp + 12]
- mov eax, [esp + 16]
- mov ecx, [esp + 20]
+ mov edi, [SP + 12]
+ mov eax, [SP + 16]
+ mov ecx, [SP + 20]
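+ // (`SP' is the width-independent name for the stack pointer from the
+ // shared asm macros -- esp here, rsp on AMD64 -- and similarly `DX' and
+ // `BP' below.)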
# define OUT edi
#endif
#if CPUFAM_AMD64 && ABI_SYSV
// First, check that this is even a thing, using the complicated
// dance with the flags register.
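+ // (Software that can flip EFLAGS.ID -- bit 21 -- is exactly software
+ // running on a CPU with CPUID; on older chips the bit won't budge.)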
pushf
- pop R_d(r) // current flags in d
+ pop DX // current flags in d
- or R_d(r), EFLAGS_ID // force the id bit on and check it
- push R_d(r)
+ or DX, EFLAGS_ID // force the id bit on and check it
+ push DX
popf
pushf
- pop R_d(r)
+ pop DX
test edx, EFLAGS_ID
jz 8f
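+ // (The bit refused to turn on: no CPUID.)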
- and R_d(r), ~EFLAGS_ID // force the id bit off and check it
- push R_d(r)
+ and DX, ~EFLAGS_ID // force the id bit off and check it
+ push DX
popf
pushf
- pop R_d(r)
+ pop DX
test edx, EFLAGS_ID
jnz 8f
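+ // (The bit refused to turn off again: these flags can't be trusted, so
+ // play safe and assume there's no CPUID.)
+ //
+ // For reference, the same probe in rough C.  This is an illustrative
+ // sketch only, not part of this file: it assumes GCC-style inline
+ // assembly, settles for a single toggle where the code above tries both
+ // directions, and `cpuid_works_p' is a made-up name.
+ //
+ //	static int cpuid_works_p(void)
+ //	{
+ //		unsigned long f0, f1;
+ //
+ //		/* Flip EFLAGS.ID (bit 21) and see whether the change
+ //		 * survives a round trip through the flags register.
+ //		 */
+ //		__asm__ __volatile__
+ //			("pushf; pop %0\n\t"
+ //			 "mov %0, %1; xor $0x00200000, %1\n\t"
+ //			 "push %1; popf\n\t"
+ //			 "pushf; pop %1"
+ //			 : "=r"(f0), "=r"(f1) : : "cc");
+ //		return !!((f0 ^ f1) & 0x00200000);
+ //	}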
// Enter with no arguments. Return nonzero if the XMM registers are
// usable.
- pushreg R_bp(r)
+ pushreg BP
setfp
stalloc 512
- and R_sp(r), ~15
+ and SP, ~15
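+ // FXSAVE wants a 512-byte area aligned on a 16-byte boundary, hence the
+ // fixed-size allocation and the mask on SP; the frame pointer set up
+ // above lets us recover the old SP afterwards.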
endprologue
// Save the floating point and SIMD registers, and try to clobber
// xmm0.
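+ // (In the FXSAVE image, the XMM registers start at byte offset 160, so
+ // [SP + 160] is the low 32 bits of xmm0.)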
- fxsave [R_sp(r)]
- mov eax, [R_sp(r) + 160]
- xor dword ptr [R_sp(r) + 160], 0xaaaa5555
- fxrstor [R_sp(r)]
+ fxsave [SP]
+ mov eax, [SP + 160]
+ xor dword ptr [SP + 160], 0xaaaa5555
+ fxrstor [SP]
// Save them again, and read back the low word of xmm0. Undo the
// clobbering and restore.
- fxsave [R_sp(r)]
- mov ecx, [R_sp(r) + 160]
- mov [R_sp(r) + 160], eax
- fxrstor [R_sp(r)]
+ fxsave [SP]
+ mov ecx, [SP + 160]
+ mov [SP + 160], eax
+ fxrstor [SP]
- // The register are live if we read different things.
+ // The registers are live if we read different things.
xor eax, ecx
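+ // (If the XMM state isn't available -- e.g., the OS left CR4.OSFXSR
+ // clear -- then FXSAVE and FXRSTOR needn't touch the XMM image at all,
+ // so the clobber won't stick and we rightly return zero.)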
// Done.
dropfp
- popreg R_bp(r)
+ popreg BP
ret
ENDFUNC
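+ // For reference, the probe above in rough C, via the compiler's
+ // FXSAVE/FXRSTOR intrinsics.  An illustrative sketch only, not part of
+ // this file: it assumes GCC/Clang's <immintrin.h> with -mfxsr, and
+ // `xmm_works_p' is a made-up name.  Note that the real thing is written
+ // in assembler precisely so that nothing can disturb xmm0 between the
+ // restore and the second save.
+ //
+ //	#include <stdint.h>
+ //	#include <string.h>
+ //	#include <immintrin.h>
+ //
+ //	static int xmm_works_p(void)
+ //	{
+ //		_Alignas(16) unsigned char buf[512];	/* FXSAVE area */
+ //		uint32_t a, c, t;
+ //
+ //		/* Save the FP/SIMD state; flip bits in xmm0's image at
+ //		 * offset 160, and load the mangled state back.
+ //		 */
+ //		_fxsave(buf); memcpy(&a, buf + 160, 4);
+ //		t = a ^ 0xaaaa5555; memcpy(buf + 160, &t, 4);
+ //		_fxrstor(buf);
+ //
+ //		/* Save again and read back: if the XMM registers are
+ //		 * live we see the mangled value.  Undo the damage.
+ //		 */
+ //		_fxsave(buf); memcpy(&c, buf + 160, 4);
+ //		memcpy(buf + 160, &a, 4); _fxrstor(buf);
+ //		return a != c;
+ //	}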
#if CPUFAM_X86
# define X_OUT edx
# define COUNT ecx
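+ // On 32-bit x86, the output pointer arrives on the stack at [SP + 4],
+ // just above the return address.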
- mov X_OUT, [esp + 4]
+ mov X_OUT, [SP + 4]
#endif
#if CPUFAM_AMD64 && ABI_SYSV
# define X_OUT rdi
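+ // (On AMD64 with the SysV ABI the first argument already arrives in
+ // rdi, so there's nothing to load.)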