regmap_avx = 2*WORDSZ
regmap_size = 3*WORDSZ
-#define REGDEF_GPX86_COMMON(rn, RN) \
- regsrc.e##rn = REGSRC_GP | REGIX_##RN; \
+#define REGDEF_GPX86_COMMON(rn, ix) \
+ regsrc.e##rn = REGSRC_GP | ix; \
regty.e##rn = REGF_32; \
regfmt.e##rn = REGF_HEX; \
- regsrc.r##rn = REGSRC_GP | REGIX_##RN; \
+ regsrc.r##rn = REGSRC_GP | ix; \
regty.r##rn = REGF_64; \
regfmt.r##rn = REGF_HEX
regsrc.rn##x = REGSRC_GP | REGIX_##RN##X; \
regty.rn##x = REGF_16; \
regfmt.rn##x = REGF_HEX; \
- REGDEF_GPX86_COMMON(rn##x, RN##X)
+ REGDEF_GPX86_COMMON(rn##x, REGIX_##RN##X)
REGDEF_GPX86_ABCD(a, A)
REGDEF_GPX86_ABCD(b, B)
REGDEF_GPX86_ABCD(c, C)
regsrc.rn = REGSRC_GP | REGIX_##RN; \
regty.rn = REGF_16; \
regfmt.rn = REGF_HEX; \
- REGDEF_GPX86_COMMON(rn, RN)
+ REGDEF_GPX86_COMMON(rn, REGIX_##RN)
REGDEF_GPX86_XP(ip, IP)
REGDEF_GPX86_XP(si, SI)
REGDEF_GPX86_XP(di, DI)
// Stash r/eax. This is bletcherous: hope we don't get a signal in
// the next few instructions.
- mov [R_sp(r) - REGDUMP_SPADJ + (REGIX_AX - 1)*WORDSZ], R_a(r)
+ mov [SP - REGDUMP_SPADJ + (REGIX_AX - 1)*WORDSZ], AX
.ifnes "\addr", "nil"
// Collect the effective address for the following dump, leaving it
// in the `addr' slot of the dump.
- lea R_a(r), \addr
- mov [R_sp(r) - REGDUMP_SPADJ + (REGIX_ADDR - 1)*WORDSZ], R_a(r)
+ lea AX, \addr
+ mov [SP - REGDUMP_SPADJ + (REGIX_ADDR - 1)*WORDSZ], AX
.endif
// Make space for the register save area. On AMD64 with System/V
// ABI, also skip the red zone. Use `lea' here to preserve the
// flags.
- lea R_sp(r), [R_sp(r) - REGDUMP_SPADJ]
+ lea SP, [SP - REGDUMP_SPADJ]
// Save flags and general-purpose registers. On 32-bit x86, we save
// ebx and establish a GOT pointer here for the benefit of the
// PLT-indirect calls made later on.
pushf
# if CPUFAM_X86
- mov [esp + 4*REGIX_BX], ebx
+ mov [SP + 4*REGIX_BX], ebx
ldgot
# endif
callext F(regdump_gpsave)
// Make space for the extended registers.
- sub R_sp(r), R_c(r)
+ sub SP, CX
callext F(regdump_xtsave)
// Prepare for calling back into C. On 32-bit x86, leave space for
// the called function's stack arguments; on Windows, leave the
// 32-byte `shadow space' required by the x64 ABI. Also, forcibly
// align the stack pointer to a 16-byte boundary.
# if CPUFAM_X86
- sub esp, 16
+ sub SP, 16
# elif ABI_WIN
- sub rsp, 32
+ sub SP, 32
# endif
- and R_sp(r), ~15
+ and SP, ~15
.endm
.macro _rstrregs
// We assume r/ebp still points to the register map.
callext F(regdump_xtrstr)
- mov R_sp(r), R_bp(r)
+ mov SP, BP
callext F(regdump_gprstr)
popf
- lea R_sp(r), [R_sp(r) + REGDUMP_SPADJ]
+ lea SP, [SP + REGDUMP_SPADJ]
.endm
.macro _regbase
# if CPUFAM_X86
- mov [esp + 0], ebp
+ mov [SP + 0], BP
# elif ABI_SYSV
- mov rdi, rbp
+ mov rdi, BP
# elif ABI_WIN
- mov rcx, rbp
+ mov rcx, BP
# endif
.endm
.macro _membase
- mov R_a(r), [R_bp(r) + regmap_gp]
+ mov AX, [BP + regmap_gp]
# if CPUFAM_X86
mov eax, [eax + REGIX_ADDR*WORDSZ]
- mov [esp + 0], eax
+ mov [SP + 0], eax
# elif ABI_SYSV
mov rdi, [rax + REGIX_ADDR*WORDSZ]
# elif ABI_WIN
.macro _reglbl msg
.ifeqs "\msg", ""
# if CPUFAM_X86
- mov dword ptr [esp + 4], 0
+ mov dword ptr [SP + 4], 0
# elif ABI_SYSV
xor esi, esi
# elif ABI_WIN
.else
# if CPUFAM_X86
lea eax, [INTADDR(.L$_reglbl$\@)]
- mov [esp + 4], eax
+ mov [SP + 4], eax
# elif ABI_SYSV
lea rsi, [INTADDR(.L$_reglbl$\@)]
# elif ABI_WIN
.macro _regfmt arg
# if CPUFAM_X86
- mov dword ptr [esp + 8], \arg
+ mov dword ptr [SP + 8], \arg
# elif ABI_SYSV
mov edx, \arg
# elif ABI_WIN
.endm
.macro _regfmt arg
- movw r2, #\arg&0xffff
- movt r2, #(\arg >> 16)&0xffff
+ movw r2, #(\arg)&0xffff
+ movt r2, #((\arg) >> 16)&0xffff
.endm
#endif
.endm
.macro _regfmt arg
- movz w2, #\arg&0xffff
- movk w2, #(\arg >> 16)&0xffff, lsl #16
+ movz w2, #(\arg)&0xffff
+ movk w2, #((\arg) >> 16)&0xffff, lsl #16
.endm
#endif