aboutsummaryrefslogtreecommitdiff
path: root/py/asmx64.h
diff options
context:
space:
mode:
authorDamien George <damien.p.george@gmail.com>2018-10-13 14:53:35 +1100
committerDamien George <damien.p.george@gmail.com>2018-10-13 15:16:33 +1100
commit006671056da6627073f041b4d451cab9db031ff0 (patch)
treeed1385d037fab8ed3b32869f0a9bb9ebd88b79f6 /py/asmx64.h
parent355eb8eafb1a0b0e096cd452d1107ab45bbf72c4 (diff)
py/emitnative: Load native fun table ptr from const table for all archs.
All architectures now have a dedicated register to hold the pointer to the native function table mp_fun_table, and so they all need to load this register at the start of the native function. This commit makes the loading of this register uniform across architectures by passing the pointer in the constant table for the native function, and then loading the register from the constant table. Doing it this way means that the pointer is not stored in the assembly code, helping to make the code more portable.
Diffstat (limited to 'py/asmx64.h')
-rw-r--r--py/asmx64.h6
1 file changed, 6 insertions, 0 deletions
diff --git a/py/asmx64.h b/py/asmx64.h
index f40b127e5..76e3ad556 100644
--- a/py/asmx64.h
+++ b/py/asmx64.h
@@ -116,6 +116,9 @@ void asm_x64_mov_local_addr_to_r64(asm_x64_t* as, int local_num, int dest_r64);
void asm_x64_mov_reg_pcrel(asm_x64_t *as, int dest_r64, mp_uint_t label);
void asm_x64_call_ind(asm_x64_t* as, size_t fun_id, int temp_r32);
+// Holds a pointer to mp_fun_table
+#define ASM_X64_REG_FUN_TABLE ASM_X64_REG_RBP
+
#if GENERIC_ASM_API
// The following macros provide a (mostly) arch-independent API to
@@ -141,6 +144,9 @@ void asm_x64_call_ind(asm_x64_t* as, size_t fun_id, int temp_r32);
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)
+// Holds a pointer to mp_fun_table
+#define REG_FUN_TABLE ASM_X64_REG_FUN_TABLE
+
#define ASM_T asm_x64_t
#define ASM_END_PASS asm_x64_end_pass
#define ASM_ENTRY asm_x64_entry