author     Damien George <damien.p.george@gmail.com>    2018-08-16 13:45:24 +1000
committer  Damien George <damien.p.george@gmail.com>    2018-08-16 13:45:24 +1000
commit     2964b41c282917bfb3f6e3a1d6e3fd7a078abed6 (patch)
tree       82e8039004d9a19790878c9cff77413d87cf2f81 /py/asmthumb.h
parent     f7d6108d1ad83f26598e73d3b163f3de411c902b (diff)
py/asm*: Support assembling code to jump to a register, and get PC+off.
Useful for position independent code, and implementing state machines.
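As a rough, hypothetical illustration (not part of the commit itself), the new helpers could be
combined to perform a position-independent computed jump; `target_label` and the register choice
are assumptions made for this sketch only:

    // minimal sketch, assuming `as` is an initialised asm_thumb_t* and
    // `target_label` is a label already defined in the instruction stream
    asm_thumb_mov_reg_pcrel(as, ASM_THUMB_REG_R0, target_label); // r0 = PC-relative address of target_label
    asm_thumb_bx_reg(as, ASM_THUMB_REG_R0);                      // branch to the address in r0

The emitter-level macros ASM_MOV_REG_PCREL and ASM_JUMP_REG added below wrap exactly these two calls.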
Diffstat (limited to 'py/asmthumb.h')
-rw-r--r--    py/asmthumb.h    | 23
1 file changed, 23 insertions(+), 0 deletions(-)
diff --git a/py/asmthumb.h b/py/asmthumb.h
index 9d25b973f..fb42a76ac 100644
--- a/py/asmthumb.h
+++ b/py/asmthumb.h
@@ -180,6 +180,26 @@ void asm_thumb_format_4(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src);
static inline void asm_thumb_cmp_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_CMP, rlo_dest, rlo_src); }
+// FORMAT 5: hi register operations (add, cmp, mov, bx)
+// For add/cmp/mov, at least one of the args must be a high register
+
+#define ASM_THUMB_FORMAT_5_ADD (0x4400)
+#define ASM_THUMB_FORMAT_5_BX (0x4700)
+
+#define ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src) \
+ ((op) | ((r_dest) << 4 & 0x0080) | ((r_src) << 3) | ((r_dest) & 0x0007))
+
+static inline void asm_thumb_format_5(asm_thumb_t *as, uint op, uint r_dest, uint r_src) {
+ asm_thumb_op16(as, ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src));
+}
+
+static inline void asm_thumb_add_reg_reg(asm_thumb_t *as, uint r_dest, uint r_src) {
+ asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_ADD, r_dest, r_src);
+}
+static inline void asm_thumb_bx_reg(asm_thumb_t *as, uint r_src) {
+ asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_BX, 0, r_src);
+}
+
// FORMAT 9: load/store with immediate offset
// For word transfers the offset must be aligned, and >>2
@@ -233,6 +253,7 @@ void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32); //
void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num_dest, uint rlo_src); // convenience
void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
+void asm_thumb_mov_reg_pcrel(asm_thumb_t *as, uint rlo_dest, uint label);
void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch
@@ -282,6 +303,7 @@ void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp
asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
} while (0)
+#define ASM_JUMP_REG(as, reg) asm_thumb_bx_reg((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)
#define ASM_MOV_LOCAL_REG(as, local_num, reg) asm_thumb_mov_local_reg((as), (local_num), (reg))
@@ -290,6 +312,7 @@ void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_thumb_mov_reg_local((as), (reg_dest), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_thumb_mov_reg_local_addr((as), (reg_dest), (local_num))
+#define ASM_MOV_REG_PCREL(as, rlo_dest, label) asm_thumb_mov_reg_pcrel((as), (rlo_dest), (label))
#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
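
For reference, plugging two well-known instructions into the new ASM_THUMB_FORMAT_5_ENCODE macro
reproduces their standard Thumb encodings (a worked check added here for illustration, not part of
the commit):

    // bx lr: op = 0x4700, r_dest = 0, r_src = 14 (lr)
    //   0x4700 | (0 << 4 & 0x0080) | (14 << 3) | (0 & 0x0007) = 0x4770
    // add r8, r1: op = 0x4400, r_dest = 8, r_src = 1
    //   0x4400 | (8 << 4 & 0x0080) | (1 << 3) | (8 & 0x0007) = 0x4400 | 0x0080 | 0x0008 = 0x4488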