#ifndef MICROPY_INCLUDED_PY_ASMX64_H
#define MICROPY_INCLUDED_PY_ASMX64_H

// x64 register numbers
#define ASM_X64_REG_RAX (0)
#define ASM_X64_REG_RCX (1)
#define ASM_X64_REG_RDX (2)
#define ASM_X64_REG_RBX (3)
#define ASM_X64_REG_RSP (4)
#define ASM_X64_REG_RBP (5)
#define ASM_X64_REG_RSI (6)
#define ASM_X64_REG_RDI (7)
#define ASM_X64_REG_R08 (8)
#define ASM_X64_REG_R09 (9)
#define ASM_X64_REG_R10 (10)
#define ASM_X64_REG_R11 (11)
#define ASM_X64_REG_R12 (12)
#define ASM_X64_REG_R13 (13)
#define ASM_X64_REG_R14 (14)
#define ASM_X64_REG_R15 (15)

// condition codes, used with asm_x64_jcc_label and asm_x64_setcc_r8
#define ASM_X64_CC_JB (0x2) // below, unsigned
#define ASM_X64_CC_JZ (0x4)
#define ASM_X64_CC_JE (0x4)
#define ASM_X64_CC_JNZ (0x5)
#define ASM_X64_CC_JNE (0x5)
#define ASM_X64_CC_JL (0xc) // less, signed
#define ASM_X64_CC_JGE (0xd) // greater or equal, signed
#define ASM_X64_CC_JLE (0xe) // less or equal, signed
#define ASM_X64_CC_JG (0xf) // greater, signed

static inline void asm_x64_end_pass(asm_x64_t *as) {
    (void)as;
}
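// Illustrative sketch, not part of the original header: one plausible way the
// condition codes above pair with the compare/setcc/jcc emitters declared
// further below. The function name "example_emit_equal_check" and the label
// number 1 are assumptions for illustration only; real callers obtain label
// numbers from the surrounding emitter's label machinery.
static inline void example_emit_equal_check(asm_x64_t *as) {
    // compare the two argument registers and set the flags
    asm_x64_cmp_r64_with_r64(as, ASM_X64_REG_RDI, ASM_X64_REG_RSI);
    // set the low byte of rax to 1 if they compared equal, 0 otherwise
    asm_x64_setcc_r8(as, ASM_X64_CC_JE, ASM_X64_REG_RAX);
    // branch to label 1 if they were not equal (setcc leaves the flags intact)
    asm_x64_jcc_label(as, ASM_X64_CC_JNE, 1);
}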
#if GENERIC_ASM_API

// The following defines and macros provide a generic, architecture-independent
// interface to the assembler, used by the native emitter.

#define ASM_WORD_SIZE (8)

#define REG_RET ASM_X64_REG_RAX
#define REG_ARG_1 ASM_X64_REG_RDI
#define REG_ARG_2 ASM_X64_REG_RSI
#define REG_ARG_3 ASM_X64_REG_RDX
#define REG_ARG_4 ASM_X64_REG_RCX
#define REG_ARG_5 ASM_X64_REG_R08

#define REG_TEMP0 ASM_X64_REG_RAX
#define REG_TEMP1 ASM_X64_REG_RDI
#define REG_TEMP2 ASM_X64_REG_RSI

#define REG_LOCAL_1 ASM_X64_REG_RBX
#define REG_LOCAL_2 ASM_X64_REG_R12
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

#define ASM_T asm_x64_t
#define ASM_END_PASS asm_x64_end_pass
#define ASM_ENTRY asm_x64_entry
#define ASM_EXIT asm_x64_exit

#define ASM_JUMP asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)

#define ASM_MOV_REG_TO_LOCAL asm_x64_mov_r64_to_local
#define ASM_MOV_IMM_TO_REG asm_x64_mov_i64_to_r64_optimised
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x64_mov_i64_to_r64_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
        asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
    } while (0)
#define ASM_MOV_LOCAL_TO_REG asm_x64_mov_local_to_r64
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x64_mov_local_addr_to_r64

#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // MICROPY_INCLUDED_PY_ASMX64_H

// Function prototypes for the x64 instruction emitters (implemented in py/asmx64.c):
void asm_x64_shl_r64_cl(asm_x64_t *as, int dest_r64);
void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_mov_mem8_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); // 8-bit load, zero-extended
void asm_x64_mov_r64_to_local(asm_x64_t *as, int src_r64, int dest_local_num);
void asm_x64_sub_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_call_ind(asm_x64_t *as, void *ptr, int temp_r32);
void asm_x64_mov_r32_to_mem32(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, mp_uint_t label);
void asm_x64_mul_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_pop_r64(asm_x64_t *as, int dest_r64);
void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8);
void asm_x64_nop(asm_x64_t *as);
void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64);
void asm_x64_mov_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_add_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_entry(asm_x64_t *as, int num_locals);
typedef struct _asm_x64_t asm_x64_t;
void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64);
void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_push_r64(asm_x64_t *as, int src_r64);
void asm_x64_test_r8_with_r8(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_exit(asm_x64_t *as);
void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label);
void asm_x64_mov_r16_to_mem16(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64);
void asm_x64_mov_mem64_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64);
void asm_x64_sar_r64_cl(asm_x64_t *as, int dest_r64);
void asm_x64_mov_mem32_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); // 32-bit load, zero-extended
void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_i64_to_r64_aligned(asm_x64_t *as, int64_t src_i64, int dest_r64);
void asm_x64_mov_mem16_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); // 16-bit load, zero-extended
void asm_x64_or_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_and_r64_r64(asm_x64_t *as, int dest_r64, int src_r64);
void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp);
void asm_x64_mov_local_to_r64(asm_x64_t *as, int src_local_num, int dest_r64);
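// Illustrative sketch, not part of the original header: the minimal shape of a
// code-generation routine built on the prototypes above. The function name
// "example_emit_return_42" is an assumption for illustration; in MicroPython
// the native emitter drives these calls, normally over more than one pass so
// that jump targets can be resolved (hence asm_x64_end_pass above).
static inline void example_emit_return_42(asm_x64_t *as) {
    // prologue: set up the stack frame, with room for zero locals
    asm_x64_entry(as, 0);
    // place the result in rax, the x86-64 return-value register
    asm_x64_mov_i64_to_r64_optimised(as, 42, ASM_X64_REG_RAX);
    // epilogue: undo the prologue and return to the caller
    asm_x64_exit(as);
}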