Numworks Epsilon 1.4.1
Graphing Calculator Operating System
asmx64.c
/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include <stdint.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>

#include "py/mpconfig.h"

// wrapper around everything in this file
#if MICROPY_EMIT_X64

#include "py/asmx64.h"

/* all offsets are measured in multiples of 8 bytes */
#define WORD_SIZE (8)

#define OPCODE_NOP (0x90)
#define OPCODE_PUSH_R64 (0x50) /* +rq */
#define OPCODE_PUSH_I64 (0x68)
#define OPCODE_PUSH_M64 (0xff) /* /6 */
#define OPCODE_POP_R64 (0x58) /* +rq */
#define OPCODE_RET (0xc3)
#define OPCODE_MOV_I8_TO_R8 (0xb0) /* +rb */
#define OPCODE_MOV_I64_TO_R64 (0xb8) /* +rq */
#define OPCODE_MOV_I32_TO_RM32 (0xc7)
#define OPCODE_MOV_R8_TO_RM8 (0x88) /* /r */
#define OPCODE_MOV_R64_TO_RM64 (0x89) /* /r */
#define OPCODE_MOV_RM64_TO_R64 (0x8b) /* /r */
#define OPCODE_MOVZX_RM8_TO_R64 (0xb6) /* 0x0f 0xb6/r */
#define OPCODE_MOVZX_RM16_TO_R64 (0xb7) /* 0x0f 0xb7/r */
#define OPCODE_LEA_MEM_TO_R64 (0x8d) /* /r */
#define OPCODE_AND_R64_TO_RM64 (0x21) /* /r */
#define OPCODE_OR_R64_TO_RM64 (0x09) /* /r */
#define OPCODE_XOR_R64_TO_RM64 (0x31) /* /r */
#define OPCODE_ADD_R64_TO_RM64 (0x01) /* /r */
#define OPCODE_ADD_I32_TO_RM32 (0x81) /* /0 */
#define OPCODE_ADD_I8_TO_RM32 (0x83) /* /0 */
#define OPCODE_SUB_R64_FROM_RM64 (0x29)
#define OPCODE_SUB_I32_FROM_RM64 (0x81) /* /5 */
#define OPCODE_SUB_I8_FROM_RM64 (0x83) /* /5 */
//#define OPCODE_SHL_RM32_BY_I8 (0xc1) /* /4 */
//#define OPCODE_SHR_RM32_BY_I8 (0xc1) /* /5 */
//#define OPCODE_SAR_RM32_BY_I8 (0xc1) /* /7 */
#define OPCODE_SHL_RM64_CL (0xd3) /* /4 */
#define OPCODE_SAR_RM64_CL (0xd3) /* /7 */
//#define OPCODE_CMP_I32_WITH_RM32 (0x81) /* /7 */
//#define OPCODE_CMP_I8_WITH_RM32 (0x83) /* /7 */
#define OPCODE_CMP_R64_WITH_RM64 (0x39) /* /r */
//#define OPCODE_CMP_RM32_WITH_R32 (0x3b)
#define OPCODE_TEST_R8_WITH_RM8 (0x84) /* /r */
#define OPCODE_JMP_REL8 (0xeb)
#define OPCODE_JMP_REL32 (0xe9)
#define OPCODE_JCC_REL8 (0x70) /* | jcc type */
#define OPCODE_JCC_REL32_A (0x0f)
#define OPCODE_JCC_REL32_B (0x80) /* | jcc type */
#define OPCODE_SETCC_RM8_A (0x0f)
#define OPCODE_SETCC_RM8_B (0x90) /* | jcc type, /0 */
#define OPCODE_CALL_REL32 (0xe8)
#define OPCODE_CALL_RM32 (0xff) /* /2 */
#define OPCODE_LEAVE (0xc9)

#define MODRM_R64(x) (((x) & 0x7) << 3)
#define MODRM_RM_DISP0 (0x00)
#define MODRM_RM_DISP8 (0x40)
#define MODRM_RM_DISP32 (0x80)
#define MODRM_RM_REG (0xc0)
#define MODRM_RM_R64(x) ((x) & 0x7)

#define OP_SIZE_PREFIX (0x66)

#define REX_PREFIX (0x40)
#define REX_W (0x08) // width
#define REX_R (0x04) // register
#define REX_X (0x02) // index
#define REX_B (0x01) // base
#define REX_W_FROM_R64(r64) ((r64) >> 0 & 0x08)
#define REX_R_FROM_R64(r64) ((r64) >> 1 & 0x04)
#define REX_X_FROM_R64(r64) ((r64) >> 2 & 0x02)
#define REX_B_FROM_R64(r64) ((r64) >> 3 & 0x01)
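
// Worked example (illustration only, not part of the emitter itself): to see
// how the REX and ModRM macros above combine, consider encoding `mov r10, rax`
// (register numbers 10 and 0), as asm_x64_generic_r64_r64() below does:
//   REX   = REX_PREFIX | REX_W | REX_R_FROM_R64(0) | REX_B_FROM_R64(10) = 0x49
//   op    = OPCODE_MOV_R64_TO_RM64                                      = 0x89
//   ModRM = MODRM_R64(0) | MODRM_RM_REG | MODRM_RM_R64(10)              = 0xc2
// giving the byte sequence 49 89 c2.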

#define IMM32_L0(x) ((x) & 0xff)
#define IMM32_L1(x) (((x) >> 8) & 0xff)
#define IMM32_L2(x) (((x) >> 16) & 0xff)
#define IMM32_L3(x) (((x) >> 24) & 0xff)
#define IMM64_L4(x) (((x) >> 32) & 0xff)
#define IMM64_L5(x) (((x) >> 40) & 0xff)
#define IMM64_L6(x) (((x) >> 48) & 0xff)
#define IMM64_L7(x) (((x) >> 56) & 0xff)

#define UNSIGNED_FIT8(x) (((x) & 0xffffffffffffff00) == 0)
#define UNSIGNED_FIT32(x) (((x) & 0xffffffff00000000) == 0)
#define SIGNED_FIT8(x) ((((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80))

static inline byte *asm_x64_get_cur_to_write_bytes(asm_x64_t *as, int n) {
    return mp_asm_base_get_cur_to_write_bytes(&as->base, n);
}

STATIC void asm_x64_write_byte_1(asm_x64_t *as, byte b1) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 1);
    if (c != NULL) {
        c[0] = b1;
    }
}

STATIC void asm_x64_write_byte_2(asm_x64_t *as, byte b1, byte b2) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 2);
    if (c != NULL) {
        c[0] = b1;
        c[1] = b2;
    }
}

STATIC void asm_x64_write_byte_3(asm_x64_t *as, byte b1, byte b2, byte b3) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 3);
    if (c != NULL) {
        c[0] = b1;
        c[1] = b2;
        c[2] = b3;
    }
}

STATIC void asm_x64_write_word32(asm_x64_t *as, int w32) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 4);
    if (c != NULL) {
        c[0] = IMM32_L0(w32);
        c[1] = IMM32_L1(w32);
        c[2] = IMM32_L2(w32);
        c[3] = IMM32_L3(w32);
    }
}

STATIC void asm_x64_write_word64(asm_x64_t *as, int64_t w64) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 8);
    if (c != NULL) {
        c[0] = IMM32_L0(w64);
        c[1] = IMM32_L1(w64);
        c[2] = IMM32_L2(w64);
        c[3] = IMM32_L3(w64);
        c[4] = IMM64_L4(w64);
        c[5] = IMM64_L5(w64);
        c[6] = IMM64_L6(w64);
        c[7] = IMM64_L7(w64);
    }
}

/* unused
STATIC void asm_x64_write_word32_to(asm_x64_t *as, int offset, int w32) {
    byte* c;
    assert(offset + 4 <= as->code_size);
    c = as->code_base + offset;
    c[0] = IMM32_L0(w32);
    c[1] = IMM32_L1(w32);
    c[2] = IMM32_L2(w32);
    c[3] = IMM32_L3(w32);
}
*/

STATIC void asm_x64_write_r64_disp(asm_x64_t *as, int r64, int disp_r64, int disp_offset) {
    assert(disp_r64 != ASM_X64_REG_RSP);

    if (disp_r64 == ASM_X64_REG_R12) {
        // special case for r12; not fully implemented
        assert(SIGNED_FIT8(disp_offset));
        asm_x64_write_byte_3(as, MODRM_R64(r64) | MODRM_RM_DISP8 | MODRM_RM_R64(disp_r64), 0x24, IMM32_L0(disp_offset));
        return;
    }

    if (disp_offset == 0 && disp_r64 != ASM_X64_REG_RBP) {
        asm_x64_write_byte_1(as, MODRM_R64(r64) | MODRM_RM_DISP0 | MODRM_RM_R64(disp_r64));
    } else if (SIGNED_FIT8(disp_offset)) {
        asm_x64_write_byte_2(as, MODRM_R64(r64) | MODRM_RM_DISP8 | MODRM_RM_R64(disp_r64), IMM32_L0(disp_offset));
    } else {
        asm_x64_write_byte_1(as, MODRM_R64(r64) | MODRM_RM_DISP32 | MODRM_RM_R64(disp_r64));
        asm_x64_write_word32(as, disp_offset);
    }
}
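
// Worked example (illustration only): to see the displacement selection above
// in action, a store such as
//   asm_x64_mov_r64_to_mem64(as, ASM_X64_REG_RAX, ASM_X64_REG_RBP, -8)
// (the function is defined further below) emits REX.W = 0x48, opcode 0x89,
// ModRM = 0x45 and disp8 = 0xf8, i.e. the bytes 48 89 45 f8 for
// `mov [rbp-8], rax`; -8 passes SIGNED_FIT8, so the one-byte displacement
// form is chosen rather than the four-byte one.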

STATIC void asm_x64_generic_r64_r64(asm_x64_t *as, int dest_r64, int src_r64, int op) {
    asm_x64_write_byte_3(as, REX_PREFIX | REX_W | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), op, MODRM_R64(src_r64) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
}

void asm_x64_nop(asm_x64_t *as) {
    asm_x64_write_byte_1(as, OPCODE_NOP);
}

void asm_x64_push_r64(asm_x64_t *as, int src_r64) {
    if (src_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_PUSH_R64 | src_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_PUSH_R64 | (src_r64 & 7));
    }
}

/*
void asm_x64_push_i32(asm_x64_t *as, int src_i32) {
    asm_x64_write_byte_1(as, OPCODE_PUSH_I64);
    asm_x64_write_word32(as, src_i32); // will be sign extended to 64 bits
}
*/

/*
void asm_x64_push_disp(asm_x64_t *as, int src_r64, int src_offset) {
    assert(src_r64 < 8);
    asm_x64_write_byte_1(as, OPCODE_PUSH_M64);
    asm_x64_write_r64_disp(as, 6, src_r64, src_offset);
}
*/

void asm_x64_pop_r64(asm_x64_t *as, int dest_r64) {
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_POP_R64 | dest_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_POP_R64 | (dest_r64 & 7));
    }
}

STATIC void asm_x64_ret(asm_x64_t *as) {
    asm_x64_write_byte_1(as, OPCODE_RET);
}

void asm_x64_mov_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_MOV_R64_TO_RM64);
}

void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    if (src_r64 < 8 && dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_R8_TO_RM8);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R8_TO_RM8);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_r16_to_mem16(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    if (src_r64 < 8 && dest_r64 < 8) {
        asm_x64_write_byte_2(as, OP_SIZE_PREFIX, OPCODE_MOV_R64_TO_RM64);
    } else {
        asm_x64_write_byte_3(as, OP_SIZE_PREFIX, REX_PREFIX | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R64_TO_RM64);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_r32_to_mem32(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    if (src_r64 < 8 && dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_R64_TO_RM64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R64_TO_RM64);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    // use REX prefix for 64 bit operation
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W | REX_R_FROM_R64(src_r64) | REX_B_FROM_R64(dest_r64), OPCODE_MOV_R64_TO_RM64);
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_mem8_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_2(as, 0x0f, OPCODE_MOVZX_RM8_TO_R64);
    } else {
        asm_x64_write_byte_3(as, REX_PREFIX | REX_R, 0x0f, OPCODE_MOVZX_RM8_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

void asm_x64_mov_mem16_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_2(as, 0x0f, OPCODE_MOVZX_RM16_TO_R64);
    } else {
        asm_x64_write_byte_3(as, REX_PREFIX | REX_R, 0x0f, OPCODE_MOVZX_RM16_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

void asm_x64_mov_mem32_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_RM64_TO_R64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R, OPCODE_MOV_RM64_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

void asm_x64_mov_mem64_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W | REX_R_FROM_R64(dest_r64) | REX_B_FROM_R64(src_r64), OPCODE_MOV_RM64_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

STATIC void asm_x64_lea_disp_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation
    assert(src_r64 < 8);
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W, OPCODE_LEA_MEM_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

/*
void asm_x64_mov_i8_to_r8(asm_x64_t *as, int src_i8, int dest_r64) {
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, OPCODE_MOV_I8_TO_R8 | dest_r64, src_i8);
}
*/

STATIC void asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64) {
    // cpu defaults to i32 to r64, with zero extension
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_I64_TO_R64 | dest_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_MOV_I64_TO_R64 | (dest_r64 & 7));
    }
    asm_x64_write_word32(as, src_i32);
}

void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // cpu defaults to i32 to r64
    // to mov i64 to r64 need to use REX prefix
    asm_x64_write_byte_2(as,
        REX_PREFIX | REX_W | (dest_r64 < 8 ? 0 : REX_B),
        OPCODE_MOV_I64_TO_R64 | (dest_r64 & 7));
    asm_x64_write_word64(as, src_i64);
}

void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // TODO use movzx, movsx if possible
    if (UNSIGNED_FIT32(src_i64)) {
        // 5 bytes
        asm_x64_mov_i32_to_r64(as, src_i64 & 0xffffffff, dest_r64);
    } else {
        // 10 bytes
        asm_x64_mov_i64_to_r64(as, src_i64, dest_r64);
    }
}
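
// Worked example (illustration only): asm_x64_mov_i64_to_r64_optimised(as, 1,
// ASM_X64_REG_RAX) takes the 5-byte path and emits b8 01 00 00 00
// (`mov eax, 1`, which the CPU zero-extends into rax), whereas a constant with
// any of the upper 32 bits set falls back to the 10-byte REX.W form written by
// asm_x64_mov_i64_to_r64().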

// src_i64 is stored as a full word in the code, and aligned to machine-word boundary
void asm_x64_mov_i64_to_r64_aligned(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // mov instruction uses 2 bytes for the instruction, before the i64
    while (((as->base.code_offset + 2) & (WORD_SIZE - 1)) != 0) {
        asm_x64_nop(as);
    }
    asm_x64_mov_i64_to_r64(as, src_i64, dest_r64);
}

void asm_x64_and_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_AND_R64_TO_RM64);
}

void asm_x64_or_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_OR_R64_TO_RM64);
}

void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_XOR_R64_TO_RM64);
}

void asm_x64_shl_r64_cl(asm_x64_t *as, int dest_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, 4, OPCODE_SHL_RM64_CL);
}

void asm_x64_sar_r64_cl(asm_x64_t *as, int dest_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, 7, OPCODE_SAR_RM64_CL);
}

void asm_x64_add_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_ADD_R64_TO_RM64);
}

void asm_x64_sub_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_SUB_R64_FROM_RM64);
}

void asm_x64_mul_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    // imul reg64, reg/mem64 -- 0x0f 0xaf /r
    asm_x64_write_byte_1(as, REX_PREFIX | REX_W | REX_R_FROM_R64(dest_r64) | REX_B_FROM_R64(src_r64));
    asm_x64_write_byte_3(as, 0x0f, 0xaf, MODRM_R64(dest_r64) | MODRM_RM_REG | MODRM_RM_R64(src_r64));
}

/*
void asm_x64_sub_i32_from_r32(asm_x64_t *as, int src_i32, int dest_r32) {
    if (SIGNED_FIT8(src_i32)) {
        // defaults to 32 bit operation
        asm_x64_write_byte_2(as, OPCODE_SUB_I8_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r32));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        // defaults to 32 bit operation
        asm_x64_write_byte_2(as, OPCODE_SUB_I32_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r32));
        asm_x64_write_word32(as, src_i32);
    }
}
*/

STATIC void asm_x64_sub_r64_i32(asm_x64_t *as, int dest_r64, int src_i32) {
    assert(dest_r64 < 8);
    if (SIGNED_FIT8(src_i32)) {
        // use REX prefix for 64 bit operation
        asm_x64_write_byte_3(as, REX_PREFIX | REX_W, OPCODE_SUB_I8_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        // use REX prefix for 64 bit operation
        asm_x64_write_byte_3(as, REX_PREFIX | REX_W, OPCODE_SUB_I32_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
        asm_x64_write_word32(as, src_i32);
    }
}

/*
void asm_x64_shl_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SHL_RM32_BY_I8, MODRM_R64(4) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}

void asm_x64_shr_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SHR_RM32_BY_I8, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}

void asm_x64_sar_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SAR_RM32_BY_I8, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}
*/

void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b) {
    asm_x64_generic_r64_r64(as, src_r64_b, src_r64_a, OPCODE_CMP_R64_WITH_RM64);
}

/*
void asm_x64_cmp_i32_with_r32(asm_x64_t *as, int src_i32, int src_r32) {
    if (SIGNED_FIT8(src_i32)) {
        asm_x64_write_byte_2(as, OPCODE_CMP_I8_WITH_RM32, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(src_r32));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        asm_x64_write_byte_2(as, OPCODE_CMP_I32_WITH_RM32, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(src_r32));
        asm_x64_write_word32(as, src_i32);
    }
}
*/

void asm_x64_test_r8_with_r8(asm_x64_t *as, int src_r64_a, int src_r64_b) {
    // TODO implement for other registers
    assert(src_r64_a == ASM_X64_REG_RAX);
    assert(src_r64_b == ASM_X64_REG_RAX);
    asm_x64_write_byte_2(as, OPCODE_TEST_R8_WITH_RM8, MODRM_R64(src_r64_a) | MODRM_RM_REG | MODRM_RM_R64(src_r64_b));
}

void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8) {
    assert(dest_r8 < 8);
    asm_x64_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R64(0) | MODRM_RM_REG | MODRM_RM_R64(dest_r8));
}

STATIC mp_uint_t get_label_dest(asm_x64_t *as, mp_uint_t label) {
    assert(label < as->base.max_num_labels);
    return as->base.label_offsets[label];
}

void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->base.code_offset;
    if (dest != (mp_uint_t)-1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
        rel -= 2;
        if (SIGNED_FIT8(rel)) {
            asm_x64_write_byte_2(as, OPCODE_JMP_REL8, rel & 0xff);
        } else {
            rel += 2;
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
    large_jump:
        rel -= 5;
        asm_x64_write_byte_1(as, OPCODE_JMP_REL32);
        asm_x64_write_word32(as, rel);
    }
}
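
// Worked example (illustration only) of the backwards-jump case above: if the
// label was bound at code_offset 0x10 and the jump is emitted at code_offset
// 0x20, then rel = 0x10 - 0x20 = -16; subtracting 2 for the size of the
// jmp rel8 instruction gives -18, which fits in a signed byte, so the two
// bytes eb ee are written (the CPU adds the displacement to the address of
// the next instruction, landing back at offset 0x10). Jumps whose label is
// not yet resolved (dest == (mp_uint_t)-1) always take the 5-byte rel32 form.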

void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, mp_uint_t label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->base.code_offset;
    if (dest != (mp_uint_t)-1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
        rel -= 2;
        if (SIGNED_FIT8(rel)) {
            asm_x64_write_byte_2(as, OPCODE_JCC_REL8 | jcc_type, rel & 0xff);
        } else {
            rel += 2;
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
    large_jump:
        rel -= 6;
        asm_x64_write_byte_2(as, OPCODE_JCC_REL32_A, OPCODE_JCC_REL32_B | jcc_type);
        asm_x64_write_word32(as, rel);
    }
}

void asm_x64_entry(asm_x64_t *as, int num_locals) {
    asm_x64_push_r64(as, ASM_X64_REG_RBP);
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RBP, ASM_X64_REG_RSP);
    if (num_locals < 0) {
        num_locals = 0;
    }
    num_locals |= 1; // make it odd so stack is aligned on 16 byte boundary
    asm_x64_sub_r64_i32(as, ASM_X64_REG_RSP, num_locals * WORD_SIZE);
    asm_x64_push_r64(as, ASM_X64_REG_RBX);
    asm_x64_push_r64(as, ASM_X64_REG_R12);
    asm_x64_push_r64(as, ASM_X64_REG_R13);
    as->num_locals = num_locals;
}

void asm_x64_exit(asm_x64_t *as) {
    asm_x64_pop_r64(as, ASM_X64_REG_R13);
    asm_x64_pop_r64(as, ASM_X64_REG_R12);
    asm_x64_pop_r64(as, ASM_X64_REG_RBX);
    asm_x64_write_byte_1(as, OPCODE_LEAVE);
    asm_x64_ret(as);
}

// locals:
//  - stored on the stack in ascending order
//  - numbered 0 through as->num_locals-1
//  - RBP points above the last local
//
//                          | RBP
//                          v
//  l0  l1  l2  ...  l(n-1)
//  ^                 ^
//  | low address     | high address in RAM
//
STATIC int asm_x64_local_offset_from_ebp(asm_x64_t *as, int local_num) {
    return (-as->num_locals + local_num) * WORD_SIZE;
}
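
// Worked example (illustration only): with as->num_locals == 3 (asm_x64_entry
// always rounds the count up to an odd number), local 0 lives at [rbp-24],
// local 1 at [rbp-16] and local 2 at [rbp-8], since the returned offset is
// (-3 + local_num) * WORD_SIZE.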

void asm_x64_mov_local_to_r64(asm_x64_t *as, int src_local_num, int dest_r64) {
    asm_x64_mov_mem64_to_r64(as, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, src_local_num), dest_r64);
}

void asm_x64_mov_r64_to_local(asm_x64_t *as, int src_r64, int dest_local_num) {
    asm_x64_mov_r64_to_mem64(as, src_r64, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, dest_local_num));
}

void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64) {
    int offset = asm_x64_local_offset_from_ebp(as, local_num);
    if (offset == 0) {
        asm_x64_mov_r64_r64(as, dest_r64, ASM_X64_REG_RBP);
    } else {
        asm_x64_lea_disp_to_r64(as, ASM_X64_REG_RBP, offset, dest_r64);
    }
}

/*
void asm_x64_push_local(asm_x64_t *as, int local_num) {
    asm_x64_push_disp(as, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, local_num));
}

void asm_x64_push_local_addr(asm_x64_t *as, int local_num, int temp_r64) {
    asm_x64_mov_r64_r64(as, temp_r64, ASM_X64_REG_RBP);
    asm_x64_add_i32_to_r32(as, asm_x64_local_offset_from_ebp(as, local_num), temp_r64);
    asm_x64_push_r64(as, temp_r64);
}
*/

/*
 can't use these because code might be relocated when resized

void asm_x64_call(asm_x64_t *as, void* func) {
    asm_x64_sub_i32_from_r32(as, 8, ASM_X64_REG_RSP);
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, func - (void*)(as->code_cur + 4));
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RSP, ASM_X64_REG_RBP);
}

void asm_x64_call_i1(asm_x64_t *as, void* func, int i1) {
    asm_x64_sub_i32_from_r32(as, 8, ASM_X64_REG_RSP);
    asm_x64_sub_i32_from_r32(as, 12, ASM_X64_REG_RSP);
    asm_x64_push_i32(as, i1);
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, func - (void*)(as->code_cur + 4));
    asm_x64_add_i32_to_r32(as, 16, ASM_X64_REG_RSP);
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RSP, ASM_X64_REG_RBP);
}
*/

void asm_x64_call_ind(asm_x64_t *as, void *ptr, int temp_r64) {
    assert(temp_r64 < 8);
#ifdef __LP64__
    asm_x64_mov_i64_to_r64_optimised(as, (int64_t)ptr, temp_r64);
#else
    // If we get here, sizeof(int) == sizeof(void*).
    asm_x64_mov_i64_to_r64_optimised(as, (int64_t)(unsigned int)ptr, temp_r64);
#endif
    asm_x64_write_byte_2(as, OPCODE_CALL_RM32, MODRM_R64(2) | MODRM_RM_REG | MODRM_RM_R64(temp_r64));
    // this reduces code size by 2 bytes per call, but doesn't seem to speed it up at all
    // doesn't work anymore because calls are 64 bits away
    /*
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, ptr - (void*)(as->code_base + as->code_offset + 4));
    */
}
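
// Worked example (illustration only): a call such as
//   asm_x64_call_ind(as, some_helper, ASM_X64_REG_RAX)
// where `some_helper` stands for whatever C function the emitter wants to
// reach, first emits a mov of the function pointer into rax (5 or 10 bytes,
// depending on whether the address fits in 32 bits) and then ff d0, i.e.
// `call rax`, the FF /2 register-indirect form selected by MODRM_R64(2).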

#endif // MICROPY_EMIT_X64