diff options
author:    Damien George <damien.p.george@gmail.com>  2014-09-29 18:45:42 +0100
committer: Damien George <damien.p.george@gmail.com>  2014-09-29 19:42:06 +0100
commit:    3112cde9006809a1ffa7f19e96fa8ee28311f411 (patch)
tree:      c888f841266f72afb787069eaf824086ad91d22e /py/asmx64.h
parent:    6f81348fa25216f03686b342765f337ab57e2e5f (diff)
py: Implement more binary ops for viper emitter.
This included a bit of restructuring of the assembler backends. Note
that the ARM backend is missing a few functions and won't compile.
Diffstat (limited to 'py/asmx64.h')
-rw-r--r--  py/asmx64.h | 16
1 file changed, 13 insertions, 3 deletions
diff --git a/py/asmx64.h b/py/asmx64.h
index 3b138a753..0d3f58ecd 100644
--- a/py/asmx64.h
+++ b/py/asmx64.h
@@ -31,6 +31,11 @@
 // - RAX, RCX, RDX, RSI, RDI, R08, R09, R10, R11 are caller-save
 // - RBX, RBP, R12, R13, R14, R15 are callee-save

+// In the functions below, argument order follows x86 docs and generally
+// the destination is the first argument.
+// NOTE: this is a change from the old convention used in this file and
+// some functions still use the old (reverse) convention.
+
 #define ASM_X64_PASS_COMPUTE (1)
 #define ASM_X64_PASS_EMIT (2)
@@ -58,6 +63,8 @@
 #define ASM_X64_CC_JNZ (0x5)
 #define ASM_X64_CC_JNE (0x5)
 #define ASM_X64_CC_JL (0xc) // less, signed
+#define ASM_X64_CC_JGE (0xd) // greater or equal, signed
+#define ASM_X64_CC_JLE (0xe) // less or equal, signed
 #define ASM_X64_CC_JG (0xf) // greater, signed

 typedef struct _asm_x64_t asm_x64_t;
@@ -72,15 +79,18 @@
 void* asm_x64_get_code(asm_x64_t* as);

 void asm_x64_nop(asm_x64_t* as);
 void asm_x64_push_r64(asm_x64_t* as, int src_r64);
 void asm_x64_pop_r64(asm_x64_t* as, int dest_r64);
-void asm_x64_mov_r64_to_r64(asm_x64_t* as, int src_r64, int dest_r64);
+void asm_x64_mov_r64_r64(asm_x64_t* as, int dest_r64, int src_r64);
 void asm_x64_mov_i64_to_r64(asm_x64_t* as, int64_t src_i64, int dest_r64);
 void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64);
 void asm_x64_mov_i64_to_r64_aligned(asm_x64_t *as, int64_t src_i64, int dest_r64);
 void asm_x64_mov_r8_to_disp(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
 void asm_x64_mov_r16_to_disp(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
 void asm_x64_mov_r64_to_disp(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
-void asm_x64_xor_r64_to_r64(asm_x64_t *as, int src_r64, int dest_r64);
-void asm_x64_add_r64_to_r64(asm_x64_t* as, int src_r64, int dest_r64);
+void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
+void asm_x64_shl_r64_cl(asm_x64_t* as, int dest_r64);
+void asm_x64_sar_r64_cl(asm_x64_t* as, int dest_r64);
+void asm_x64_add_r64_r64(asm_x64_t* as, int dest_r64, int src_r64);
+void asm_x64_sub_r64_r64(asm_x64_t* as, int dest_r64, int src_r64);
 void asm_x64_cmp_r64_with_r64(asm_x64_t* as, int src_r64_a, int src_r64_b);
 void asm_x64_test_r8_with_r8(asm_x64_t* as, int src_r64_a, int src_r64_b);
 void asm_x64_setcc_r8(asm_x64_t* as, int jcc_type, int dest_r8);