Numworks Epsilon 1.4.1
Graphing Calculator Operating System
emitnative.c
1 /*
2  * This file is part of the MicroPython project, http://micropython.org/
3  *
4  * The MIT License (MIT)
5  *
6  * Copyright (c) 2013, 2014 Damien P. George
7  *
8  * Permission is hereby granted, free of charge, to any person obtaining a copy
9  * of this software and associated documentation files (the "Software"), to deal
10  * in the Software without restriction, including without limitation the rights
11  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12  * copies of the Software, and to permit persons to whom the Software is
13  * furnished to do so, subject to the following conditions:
14  *
15  * The above copyright notice and this permission notice shall be included in
16  * all copies or substantial portions of the Software.
17  *
18  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24  * THE SOFTWARE.
25  */
26 
27 // Essentially normal Python has 1 type: Python objects
28 // Viper has more than 1 type, and is just a more complicated superset of Python.
29 // If you declare everything in Viper as a Python object (ie omit type decls) then
30 // it should in principle be exactly the same as Python native.
31 // Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
32 // In practice we won't have a VM but rather do this in asm which is actually very minimal.
33 
34 // Because it breaks strict Python equivalence it should be a completely separate
35 // decorator. It breaks equivalence because overflow on integers wraps around.
36 // It shouldn't break equivalence if you don't use the new types, but since the
37 // type decls might be used in normal Python for other reasons, it's probably safest,
38 // cleanest and clearest to make it a separate decorator.
39 
40 // Actually, it does break equivalence because integers default to native integers,
41 // not Python objects.
42 
43 // for x in l[0:8]: can be compiled into a native loop if l has pointer type
44 
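As an illustration of that last comment (a sketch of the intended lowering, not actual emitter output): with `l` typed as `ptr32`, the slice loop can become a plain native loop over machine words, with no iterator protocol and no object boxing.

```c
#include <stdint.h>

// Illustration only: the native shape "for x in l[0:8]:" can lower to
// when l has viper type ptr32 (direct word loads, no Python objects).
static uint32_t sum_first_eight(const uint32_t *l) {
    uint32_t total = 0;
    for (int i = 0; i < 8; i++) {
        total += l[i]; // direct load from base + 4*i
    }
    return total;
}
```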
45 #include <stdio.h>
46 #include <string.h>
47 #include <assert.h>
48 
49 #include "py/emit.h"
50 #include "py/bc.h"
51 
52 #if MICROPY_DEBUG_VERBOSE // print debugging info
53 #define DEBUG_PRINT (1)
54 #define DEBUG_printf DEBUG_printf
55 #else // don't print debugging info
56 #define DEBUG_printf(...) (void)0
57 #endif
58 
59 // wrapper around everything in this file
60 #if (MICROPY_EMIT_X64 && N_X64) \
61  || (MICROPY_EMIT_X86 && N_X86) \
62  || (MICROPY_EMIT_THUMB && N_THUMB) \
63  || (MICROPY_EMIT_ARM && N_ARM) \
64  || (MICROPY_EMIT_XTENSA && N_XTENSA)
65 
66 // this is defined so that the assembler exports generic assembler API macros
67 #define GENERIC_ASM_API (1)
68 
69 #if N_X64
70 
71 // x64 specific stuff
72 #include "py/asmx64.h"
73 #define EXPORT_FUN(name) emit_native_x64_##name
74 
75 #elif N_X86
76 
77 // x86 specific stuff
78 
79 STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
80  [MP_F_CONVERT_OBJ_TO_NATIVE] = 2,
81  [MP_F_CONVERT_NATIVE_TO_OBJ] = 2,
82  [MP_F_LOAD_NAME] = 1,
83  [MP_F_LOAD_GLOBAL] = 1,
84  [MP_F_LOAD_BUILD_CLASS] = 0,
85  [MP_F_LOAD_ATTR] = 2,
86  [MP_F_LOAD_METHOD] = 3,
87  [MP_F_LOAD_SUPER_METHOD] = 2,
88  [MP_F_STORE_NAME] = 2,
89  [MP_F_STORE_GLOBAL] = 2,
90  [MP_F_STORE_ATTR] = 3,
91  [MP_F_OBJ_SUBSCR] = 3,
92  [MP_F_OBJ_IS_TRUE] = 1,
93  [MP_F_UNARY_OP] = 2,
94  [MP_F_BINARY_OP] = 3,
95  [MP_F_BUILD_TUPLE] = 2,
96  [MP_F_BUILD_LIST] = 2,
97  [MP_F_LIST_APPEND] = 2,
98  [MP_F_BUILD_MAP] = 1,
99  [MP_F_STORE_MAP] = 3,
100 #if MICROPY_PY_BUILTINS_SET
101  [MP_F_BUILD_SET] = 2,
102  [MP_F_STORE_SET] = 2,
103 #endif
104  [MP_F_MAKE_FUNCTION_FROM_RAW_CODE] = 3,
105  [MP_F_NATIVE_CALL_FUNCTION_N_KW] = 3,
106  [MP_F_CALL_METHOD_N_KW] = 3,
107  [MP_F_CALL_METHOD_N_KW_VAR] = 3,
108  [MP_F_NATIVE_GETITER] = 2,
109  [MP_F_NATIVE_ITERNEXT] = 1,
110  [MP_F_NLR_PUSH] = 1,
111  [MP_F_NLR_POP] = 0,
112  [MP_F_NATIVE_RAISE] = 1,
113  [MP_F_IMPORT_NAME] = 3,
114  [MP_F_IMPORT_FROM] = 2,
115  [MP_F_IMPORT_ALL] = 1,
116 #if MICROPY_PY_BUILTINS_SLICE
117  [MP_F_NEW_SLICE] = 3,
118 #endif
119  [MP_F_UNPACK_SEQUENCE] = 3,
120  [MP_F_UNPACK_EX] = 3,
121  [MP_F_DELETE_NAME] = 1,
122  [MP_F_DELETE_GLOBAL] = 1,
123  [MP_F_NEW_CELL] = 1,
124  [MP_F_MAKE_CLOSURE_FROM_RAW_CODE] = 3,
125  [MP_F_SETUP_CODE_STATE] = 5,
126  [MP_F_SMALL_INT_FLOOR_DIVIDE] = 2,
127  [MP_F_SMALL_INT_MODULO] = 2,
128 };
129 
130 #include "py/asmx86.h"
131 #define EXPORT_FUN(name) emit_native_x86_##name
132 
133 #elif N_THUMB
134 
135 // thumb specific stuff
136 #include "py/asmthumb.h"
137 #define EXPORT_FUN(name) emit_native_thumb_##name
138 
139 #elif N_ARM
140 
141 // ARM specific stuff
142 #include "py/asmarm.h"
143 #define EXPORT_FUN(name) emit_native_arm_##name
144 
145 #elif N_XTENSA
146 
147 // Xtensa specific stuff
148 #include "py/asmxtensa.h"
149 #define EXPORT_FUN(name) emit_native_xtensa_##name
150 
151 #else
152 
153 #error unknown native emitter
154 
155 #endif
156 
157 #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
158  *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
159  } while (0)
160 
161 typedef enum {
162  STACK_VALUE,
163  STACK_REG,
164  STACK_IMM,
165 } stack_info_kind_t;
166 
167 // these enums must be distinct and the bottom 4 bits
168 // must correspond to the correct MP_NATIVE_TYPE_xxx value
169 typedef enum {
170  VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
171  VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
172  VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
173  VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
174  VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
175  VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
176  VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
177  VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,
178 
179  VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
180 
181  VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
182  VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
183 } vtype_kind_t;
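Since the bottom 4 bits of each `vtype_kind_t` value equal the matching `MP_NATIVE_TYPE_xxx` constant, recovering the native type tag is a single mask. A minimal sketch (hypothetical helper, not part of the file) of the operation that `emit_native_end_pass` later performs inline when building the type signature:

```c
// Hypothetical helper: extract the MP_NATIVE_TYPE_xxx tag from a vtype.
// e.g. VTYPE_PTR_NONE == (0x50 | MP_NATIVE_TYPE_PTR), so masking with
// 0xf yields MP_NATIVE_TYPE_PTR.
static inline mp_uint_t vtype_to_native_type(vtype_kind_t vtype) {
    return (mp_uint_t)vtype & 0xf;
}
```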
184 
185 STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
186  switch (vtype) {
187  case VTYPE_PYOBJ: return MP_QSTR_object;
188  case VTYPE_BOOL: return MP_QSTR_bool;
189  case VTYPE_INT: return MP_QSTR_int;
190  case VTYPE_UINT: return MP_QSTR_uint;
191  case VTYPE_PTR: return MP_QSTR_ptr;
192  case VTYPE_PTR8: return MP_QSTR_ptr8;
193  case VTYPE_PTR16: return MP_QSTR_ptr16;
194  case VTYPE_PTR32: return MP_QSTR_ptr32;
195  case VTYPE_PTR_NONE: default: return MP_QSTR_None;
196  }
197 }
198 
199 typedef struct _stack_info_t {
200  vtype_kind_t vtype;
201  stack_info_kind_t kind;
202  union {
203  int u_reg;
204  mp_int_t u_imm;
205  } data;
206 } stack_info_t;
207 
208 struct _emit_t {
209  mp_obj_t *error_slot;
210  int pass;
211 
212  bool do_viper_types;
213 
214  vtype_kind_t return_vtype;
215 
216  mp_uint_t local_vtype_alloc;
217  vtype_kind_t *local_vtype;
218 
219  mp_uint_t stack_info_alloc;
220  stack_info_t *stack_info;
221  vtype_kind_t saved_stack_vtype;
222 
223  int prelude_offset;
224  int const_table_offset;
225  int n_state;
226  int stack_start;
227  int stack_size;
228 
229  bool last_emit_was_return_value;
230 
231  scope_t *scope;
232 
233  ASM_T *as;
234 };
235 
236 emit_t *EXPORT_FUN(new)(mp_obj_t *error_slot, mp_uint_t max_num_labels) {
237  emit_t *emit = m_new0(emit_t, 1);
238  emit->error_slot = error_slot;
239  emit->as = m_new0(ASM_T, 1);
240  mp_asm_base_init(&emit->as->base, max_num_labels);
241  return emit;
242 }
243 
244 void EXPORT_FUN(free)(emit_t *emit) {
245  mp_asm_base_deinit(&emit->as->base, false);
246  m_del_obj(ASM_T, emit->as);
247  m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
248  m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
249  m_del_obj(emit_t, emit);
250 }
251 
252 STATIC void emit_native_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
253  switch (op) {
254  case MP_EMIT_NATIVE_TYPE_ENABLE:
255  emit->do_viper_types = arg1;
256  break;
257 
258  default: {
259  vtype_kind_t type;
260  switch (arg2) {
261  case MP_QSTR_object: type = VTYPE_PYOBJ; break;
262  case MP_QSTR_bool: type = VTYPE_BOOL; break;
263  case MP_QSTR_int: type = VTYPE_INT; break;
264  case MP_QSTR_uint: type = VTYPE_UINT; break;
265  case MP_QSTR_ptr: type = VTYPE_PTR; break;
266  case MP_QSTR_ptr8: type = VTYPE_PTR8; break;
267  case MP_QSTR_ptr16: type = VTYPE_PTR16; break;
268  case MP_QSTR_ptr32: type = VTYPE_PTR32; break;
269  default: EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "unknown type '%q'", arg2); return;
270  }
271  if (op == MP_EMIT_NATIVE_TYPE_RETURN) {
272  emit->return_vtype = type;
273  } else {
274  assert(arg1 < emit->local_vtype_alloc);
275  emit->local_vtype[arg1] = type;
276  }
277  break;
278  }
279  }
280 }
281 
282 STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest);
283 STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg);
284 STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
285 STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
286 
287 #define STATE_START (sizeof(mp_code_state_t) / sizeof(mp_uint_t))
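`STATE_START` is the size of the `mp_code_state_t` header in machine words. In the non-viper case the C stack frame holds that header followed by `n_state` words of state, and local number `k` lives at slot `STATE_START + n_state - 1 - k` (locals are stored from the top of the state downwards). A sketch of that index arithmetic (hypothetical helper mirroring the expressions used in `emit_native_load_fast`/`emit_native_store_fast` below):

```c
// Hypothetical helper: memory slot of local k in a non-viper frame.
// Matches STATE_START + emit->n_state - 1 - local_num as used below.
static int non_viper_local_slot(int n_state, int local_num) {
    return (int)STATE_START + n_state - 1 - local_num;
}
```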
288 
289 STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
290  DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
291 
292  emit->pass = pass;
293  emit->stack_start = 0;
294  emit->stack_size = 0;
295  emit->last_emit_was_return_value = false;
296  emit->scope = scope;
297 
298  // allocate memory for keeping track of the types of locals
299  if (emit->local_vtype_alloc < scope->num_locals) {
300  emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
301  emit->local_vtype_alloc = scope->num_locals;
302  }
303 
304  // allocate memory for keeping track of the objects on the stack
305  // XXX don't know stack size on entry, and it should be maximum over all scopes
306  // XXX this is such a big hack and really needs to be fixed
307  if (emit->stack_info == NULL) {
308  emit->stack_info_alloc = scope->stack_size + 200;
309  emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
310  }
311 
312  // set default type for return
313  emit->return_vtype = VTYPE_PYOBJ;
314 
315  // set default type for arguments
316  mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
317  if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
318  num_args += 1;
319  }
320  if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
321  num_args += 1;
322  }
323  for (mp_uint_t i = 0; i < num_args; i++) {
324  emit->local_vtype[i] = VTYPE_PYOBJ;
325  }
326 
327  // local variables begin unbound, and have unknown type
328  for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
329  emit->local_vtype[i] = VTYPE_UNBOUND;
330  }
331 
332  // values on stack begin unbound
333  for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
334  emit->stack_info[i].kind = STACK_VALUE;
335  emit->stack_info[i].vtype = VTYPE_UNBOUND;
336  }
337 
337 
338  mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
339 
340  // generate code for entry to function
341 
342  if (emit->do_viper_types) {
343 
344  // right now we have a restriction of maximum of 4 arguments
345  if (scope->num_pos_args >= 5) {
346  EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "Viper functions don't currently support more than 4 arguments");
347  return;
348  }
349 
350  // entry to function
351  int num_locals = 0;
352  if (pass > MP_PASS_SCOPE) {
353  num_locals = scope->num_locals - REG_LOCAL_NUM;
354  if (num_locals < 0) {
355  num_locals = 0;
356  }
357  emit->stack_start = num_locals;
358  num_locals += scope->stack_size;
359  }
360  ASM_ENTRY(emit->as, num_locals);
361 
362  // TODO don't load r7 if we don't need it
363  #if N_THUMB
364  asm_thumb_mov_reg_i32(emit->as, ASM_THUMB_REG_R7, (mp_uint_t)mp_fun_table);
365  #elif N_ARM
366  asm_arm_mov_reg_i32(emit->as, ASM_ARM_REG_R7, (mp_uint_t)mp_fun_table);
367  #endif
368 
369  #if N_X86
370  for (int i = 0; i < scope->num_pos_args; i++) {
371  if (i == 0) {
372  asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_1);
373  } else if (i == 1) {
374  asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_2);
375  } else if (i == 2) {
376  asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_3);
377  } else {
378  asm_x86_mov_arg_to_r32(emit->as, i, REG_TEMP0);
379  asm_x86_mov_r32_to_local(emit->as, REG_TEMP0, i - REG_LOCAL_NUM);
380  }
381  }
382  #else
383  for (int i = 0; i < scope->num_pos_args; i++) {
384  if (i == 0) {
385  ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
386  } else if (i == 1) {
387  ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
388  } else if (i == 2) {
389  ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
390  } else {
391  assert(i == 3); // should be true; max 4 args is checked above
392  ASM_MOV_REG_TO_LOCAL(emit->as, REG_ARG_4, i - REG_LOCAL_NUM);
393  }
394  }
395  #endif
396 
397  } else {
398  // work out size of state (locals plus stack)
399  emit->n_state = scope->num_locals + scope->stack_size;
400 
401  // allocate space on C-stack for code_state structure, which includes state
402  ASM_ENTRY(emit->as, STATE_START + emit->n_state);
403 
404  // TODO don't load r7 if we don't need it
405  #if N_THUMB
406  asm_thumb_mov_reg_i32(emit->as, ASM_THUMB_REG_R7, (mp_uint_t)mp_fun_table);
407  #elif N_ARM
408  asm_arm_mov_reg_i32(emit->as, ASM_ARM_REG_R7, (mp_uint_t)mp_fun_table);
409  #endif
410 
411  // prepare incoming arguments for call to mp_setup_code_state
412 
413  #if N_X86
414  asm_x86_mov_arg_to_r32(emit->as, 0, REG_ARG_1);
415  asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_2);
416  asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_3);
417  asm_x86_mov_arg_to_r32(emit->as, 3, REG_ARG_4);
418  #endif
419 
420  // set code_state.fun_bc
421  ASM_MOV_REG_TO_LOCAL(emit->as, REG_ARG_1, offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t));
422 
423  // set code_state.ip (offset from start of this function to prelude info)
424  // XXX this encoding may change size
425  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, emit->prelude_offset, offsetof(mp_code_state_t, ip) / sizeof(uintptr_t), REG_ARG_1);
426 
427  // put address of code_state into first arg
428  ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, 0, REG_ARG_1);
429 
430  // call mp_setup_code_state to prepare code_state structure
431  #if N_THUMB
432  asm_thumb_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
433  #elif N_ARM
434  asm_arm_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
435  #else
436  ASM_CALL_IND(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE);
437  #endif
438 
439  // cache some locals in registers
440  if (scope->num_locals > 0) {
441  ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 0, REG_LOCAL_1);
442  if (scope->num_locals > 1) {
443  ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 1, REG_LOCAL_2);
444  if (scope->num_locals > 2) {
445  ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 2, REG_LOCAL_3);
446  }
447  }
448  }
449 
450  // set the type of closed over variables
451  for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
452  id_info_t *id = &scope->id_info[i];
453  if (id->kind == ID_INFO_KIND_CELL) {
454  emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
455  }
456  }
457  }
458 
459 }
460 
461 STATIC void emit_native_end_pass(emit_t *emit) {
462  if (!emit->last_emit_was_return_value) {
463  ASM_EXIT(emit->as);
464  }
465 
466  if (!emit->do_viper_types) {
467  emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
468  mp_asm_base_data(&emit->as->base, 1, 0x80 | ((emit->n_state >> 7) & 0x7f));
469  mp_asm_base_data(&emit->as->base, 1, emit->n_state & 0x7f);
470  mp_asm_base_data(&emit->as->base, 1, 0); // n_exc_stack
471  mp_asm_base_data(&emit->as->base, 1, emit->scope->scope_flags);
472  mp_asm_base_data(&emit->as->base, 1, emit->scope->num_pos_args);
473  mp_asm_base_data(&emit->as->base, 1, emit->scope->num_kwonly_args);
474  mp_asm_base_data(&emit->as->base, 1, emit->scope->num_def_pos_args);
475 
476  // write code info
477  #if MICROPY_PERSISTENT_CODE
478  mp_asm_base_data(&emit->as->base, 1, 5);
479  mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name);
480  mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8);
481  mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file);
482  mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file >> 8);
483  #else
484  mp_asm_base_data(&emit->as->base, 1, 1);
485  #endif
486 
487  // bytecode prelude: initialise closed over variables
488  for (int i = 0; i < emit->scope->id_info_len; i++) {
489  id_info_t *id = &emit->scope->id_info[i];
490  if (id->kind == ID_INFO_KIND_CELL) {
491  assert(id->local_num < 255);
492  mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
493  }
494  }
495  mp_asm_base_data(&emit->as->base, 1, 255); // end of list sentinel
496 
497  mp_asm_base_align(&emit->as->base, ASM_WORD_SIZE);
498  emit->const_table_offset = mp_asm_base_get_code_pos(&emit->as->base);
499 
500  // write argument names as qstr objects
501  // see comment in corresponding part of emitbc.c about the logic here
502  for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
503  qstr qst = MP_QSTR__star_;
504  for (int j = 0; j < emit->scope->id_info_len; ++j) {
505  id_info_t *id = &emit->scope->id_info[j];
506  if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
507  qst = id->qst;
508  break;
509  }
510  }
511  mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
512  }
513 
514  }
515 
516  ASM_END_PASS(emit->as);
517 
518  // check stack is back to zero size
519  assert(emit->stack_size == 0);
520 
521  if (emit->pass == MP_PASS_EMIT) {
522  void *f = mp_asm_base_get_code(&emit->as->base);
523  mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
524 
525  // compute type signature
526  // note that the lower 4 bits of a vtype are the correct MP_NATIVE_TYPE_xxx
527  mp_uint_t type_sig = emit->return_vtype & 0xf;
528  for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
529  type_sig |= (emit->local_vtype[i] & 0xf) << (i * 4 + 4);
530  }
531 
532  mp_emit_glue_assign_native(emit->scope->raw_code,
533  emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
534  f, f_len, (mp_uint_t*)((byte*)f + emit->const_table_offset),
535  emit->scope->num_pos_args, emit->scope->scope_flags, type_sig);
536  }
537 }
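The signature packs 4 bits per type: the return type in bits 0-3 and positional argument `i` in bits `4*i+4` to `4*i+7`. A worked example with hypothetical types, a viper function taking `(int, ptr8)` and returning `uint`:

```c
// Hypothetical example of the packing loop above for
//     def f(x: int, y: ptr8) -> uint
static const mp_uint_t example_type_sig =
    (MP_NATIVE_TYPE_UINT & 0xf)           // return type: bits 0-3
    | ((MP_NATIVE_TYPE_INT & 0xf) << 4)   // arg 0: bits 4-7
    | ((MP_NATIVE_TYPE_PTR8 & 0xf) << 8); // arg 1: bits 8-11
```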
538 
539 STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
540  return emit->last_emit_was_return_value;
541 }
542 
543 STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
544  assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
545  emit->stack_size += stack_size_delta;
546  if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
547  emit->scope->stack_size = emit->stack_size;
548  }
549 #ifdef DEBUG_PRINT
550  DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
551  for (int i = 0; i < emit->stack_size; i++) {
552  stack_info_t *si = &emit->stack_info[i];
553  DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
554  }
555  DEBUG_printf("\n");
556 #endif
557 }
558 
559 STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
560  DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
561  // If we are adjusting the stack in a positive direction (pushing) then we
562  // need to fill in values for the stack kind and vtype of the newly-pushed
563  // entries. These should be set to "value" (ie not reg or imm) because we
564  // should only need to adjust the stack due to a jump to this part in the
565  // code (and hence we have settled the stack before the jump).
566  for (mp_int_t i = 0; i < delta; i++) {
567  stack_info_t *si = &emit->stack_info[emit->stack_size + i];
568  si->kind = STACK_VALUE;
569  // TODO we don't know the vtype to use here. At the moment this is a
570  // hack to get the case of multi comparison working.
571  if (delta == 1) {
572  si->vtype = emit->saved_stack_vtype;
573  } else {
574  si->vtype = VTYPE_PYOBJ;
575  }
576  }
577  adjust_stack(emit, delta);
578 }
579 
580 STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
581  (void)emit;
582  (void)source_line;
583 }
584 
585 // this must be called at start of emit functions
586 STATIC void emit_native_pre(emit_t *emit) {
587  emit->last_emit_was_return_value = false;
588 }
589 
590 // depth==0 is top, depth==1 is before top, etc
591 STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
592  return &emit->stack_info[emit->stack_size - 1 - depth];
593 }
594 
595 // depth==0 is top, depth==1 is before top, etc
596 STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
597  return peek_stack(emit, depth)->vtype;
598 }
599 
600 // pos=1 is TOS, pos=2 is next, etc
601 // use pos=0 for no skipping
602 STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
603  skip_stack_pos = emit->stack_size - skip_stack_pos;
604  for (int i = 0; i < emit->stack_size; i++) {
605  if (i != skip_stack_pos) {
606  stack_info_t *si = &emit->stack_info[i];
607  if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
608  si->kind = STACK_VALUE;
609  ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
610  }
611  }
612  }
613 }
614 
615 STATIC void need_reg_all(emit_t *emit) {
616  for (int i = 0; i < emit->stack_size; i++) {
617  stack_info_t *si = &emit->stack_info[i];
618  if (si->kind == STACK_REG) {
619  si->kind = STACK_VALUE;
620  ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
621  }
622  }
623 }
624 
625 STATIC void need_stack_settled(emit_t *emit) {
626  DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
627  for (int i = 0; i < emit->stack_size; i++) {
628  stack_info_t *si = &emit->stack_info[i];
629  if (si->kind == STACK_REG) {
630  DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
631  si->kind = STACK_VALUE;
632  ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
633  }
634  }
635  for (int i = 0; i < emit->stack_size; i++) {
636  stack_info_t *si = &emit->stack_info[i];
637  if (si->kind == STACK_IMM) {
638  DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
639  si->kind = STACK_VALUE;
640  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + i, REG_TEMP0);
641  }
642  }
643 }
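After `need_stack_settled` returns, every live stack entry has kind `STACK_VALUE`, i.e. it lives in its memory slot `emit->stack_start + i`. That canonical form is what lets two control-flow paths meet at a label and agree on where each value is. A hypothetical debug checker for the invariant (not part of the file):

```c
// Hypothetical helper: verify that settling flushed every register- and
// immediate-cached entry to its memory slot.
static void assert_stack_settled(const emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        assert(emit->stack_info[i].kind == STACK_VALUE);
    }
}
```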
644 
645 // pos=1 is TOS, pos=2 is next, etc
646 STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
647  need_reg_single(emit, reg_dest, pos);
648  stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
649  *vtype = si->vtype;
650  switch (si->kind) {
651  case STACK_VALUE:
652  ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - pos, reg_dest);
653  break;
654 
655  case STACK_REG:
656  if (si->data.u_reg != reg_dest) {
657  ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
658  }
659  break;
660 
661  case STACK_IMM:
662  ASM_MOV_IMM_TO_REG(emit->as, si->data.u_imm, reg_dest);
663  break;
664  }
665 }
666 
667 // does an efficient X=pop(); discard(); push(X)
668  // needs a (non-temp) register in case the popped element was stored on the stack
669 STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
670  stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
671  si[0] = si[1];
672  if (si->kind == STACK_VALUE) {
673  // if folded element was on the stack we need to put it in a register
674  ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - 1, reg_dest);
675  si->kind = STACK_REG;
676  si->data.u_reg = reg_dest;
677  }
678  adjust_stack(emit, -1);
679 }
680 
681 // If stacked value is in a register and the register is not r1 or r2, then
682 // *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
683 STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
684  emit->last_emit_was_return_value = false;
685  stack_info_t *si = peek_stack(emit, 0);
686  if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
687  *vtype = si->vtype;
688  *reg_dest = si->data.u_reg;
689  need_reg_single(emit, *reg_dest, 1);
690  } else {
691  emit_access_stack(emit, 1, vtype, *reg_dest);
692  }
693  adjust_stack(emit, -1);
694 }
695 
696 STATIC void emit_pre_pop_discard(emit_t *emit) {
697  emit->last_emit_was_return_value = false;
698  adjust_stack(emit, -1);
699 }
700 
701 STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
702  emit->last_emit_was_return_value = false;
703  emit_access_stack(emit, 1, vtype, reg_dest);
704  adjust_stack(emit, -1);
705 }
706 
707 STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
708  emit_pre_pop_reg(emit, vtypea, rega);
709  emit_pre_pop_reg(emit, vtypeb, regb);
710 }
711 
712 STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
713  emit_pre_pop_reg(emit, vtypea, rega);
714  emit_pre_pop_reg(emit, vtypeb, regb);
715  emit_pre_pop_reg(emit, vtypec, regc);
716 }
717 
718 STATIC void emit_post(emit_t *emit) {
719  (void)emit;
720 }
721 
722 STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
723  stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
724  si->vtype = new_vtype;
725 }
726 
727 STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
728  stack_info_t *si = &emit->stack_info[emit->stack_size];
729  si->vtype = vtype;
730  si->kind = STACK_REG;
731  si->data.u_reg = reg;
732  adjust_stack(emit, 1);
733 }
734 
735 STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
736  stack_info_t *si = &emit->stack_info[emit->stack_size];
737  si->vtype = vtype;
738  si->kind = STACK_IMM;
739  si->data.u_imm = imm;
740  adjust_stack(emit, 1);
741 }
742 
743 STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
744  emit_post_push_reg(emit, vtypea, rega);
745  emit_post_push_reg(emit, vtypeb, regb);
746 }
747 
748 STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
749  emit_post_push_reg(emit, vtypea, rega);
750  emit_post_push_reg(emit, vtypeb, regb);
751  emit_post_push_reg(emit, vtypec, regc);
752 }
753 
754 STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
755  emit_post_push_reg(emit, vtypea, rega);
756  emit_post_push_reg(emit, vtypeb, regb);
757  emit_post_push_reg(emit, vtypec, regc);
758  emit_post_push_reg(emit, vtyped, regd);
759 }
760 
761 STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
762  need_reg_all(emit);
763  ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
764 }
765 
766 STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
767  need_reg_all(emit);
768  ASM_MOV_IMM_TO_REG(emit->as, arg_val, arg_reg);
769  ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
770 }
771 
772 // the first arg is stored in the code aligned on a mp_uint_t boundary
773 STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
774  need_reg_all(emit);
775  ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val, arg_reg);
776  ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
777 }
778 
779 STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
780  need_reg_all(emit);
781  ASM_MOV_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
782  ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
783  ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
784 }
785 
786 // the first arg is stored in the code aligned on a mp_uint_t boundary
787 STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
788  need_reg_all(emit);
789  ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
790  ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
791  ASM_MOV_IMM_TO_REG(emit->as, arg_val3, arg_reg3);
792  ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
793 }
794 
795 // vtype of all n_pop objects is VTYPE_PYOBJ
796 // Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
797 // If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
798 // Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
799 STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
800  need_reg_all(emit);
801 
802  // First, store any immediate values to their respective place on the stack.
803  for (mp_uint_t i = 0; i < n_pop; i++) {
804  stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
805  // must push any imm's to stack
806  // must convert them to VTYPE_PYOBJ for viper code
807  if (si->kind == STACK_IMM) {
808  si->kind = STACK_VALUE;
809  switch (si->vtype) {
810  case VTYPE_PYOBJ:
811  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
812  break;
813  case VTYPE_BOOL:
814  if (si->data.u_imm == 0) {
815  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
816  } else {
817  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
818  }
819  si->vtype = VTYPE_PYOBJ;
820  break;
821  case VTYPE_INT:
822  case VTYPE_UINT:
823  ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm), emit->stack_start + emit->stack_size - 1 - i, reg_dest);
824  si->vtype = VTYPE_PYOBJ;
825  break;
826  default:
827  // not handled
828  mp_raise_NotImplementedError("conversion to object");
829  }
830  }
831 
832  // verify that this value is on the stack
833  assert(si->kind == STACK_VALUE);
834  }
835 
836  // Second, convert any non-VTYPE_PYOBJ to that type.
837  for (mp_uint_t i = 0; i < n_pop; i++) {
838  stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
839  if (si->vtype != VTYPE_PYOBJ) {
840  mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
841  ASM_MOV_LOCAL_TO_REG(emit->as, local_num, REG_ARG_1);
842  emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
843  ASM_MOV_REG_TO_LOCAL(emit->as, REG_RET, local_num);
844  si->vtype = VTYPE_PYOBJ;
845  DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
846  }
847  }
848 
849  // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
850  adjust_stack(emit, -n_pop);
851  ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
852 }
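The `STACK_IMM` conversions above lean on MicroPython's object representation: `True`/`False` become the singletons `mp_const_true`/`mp_const_false`, and a native integer that fits becomes a tagged small-int with no heap allocation. A compact restatement of the mapping (a sketch under those assumptions, not the emitter's actual code path):

```c
// Sketch of the immediate-to-object mapping used above. MP_OBJ_NULL
// marks vtypes that would instead need the MP_F_CONVERT_NATIVE_TO_OBJ
// runtime call.
static mp_obj_t box_immediate(vtype_kind_t vtype, mp_int_t imm) {
    switch (vtype) {
        case VTYPE_PYOBJ: return (mp_obj_t)imm; // already an object
        case VTYPE_BOOL:  return imm == 0 ? mp_const_false : mp_const_true;
        case VTYPE_INT:
        case VTYPE_UINT:  return MP_OBJ_NEW_SMALL_INT(imm);
        default:          return MP_OBJ_NULL;
    }
}
```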
853 
854 // vtype of all n_push objects is VTYPE_PYOBJ
855 STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
856  need_reg_all(emit);
857  for (mp_uint_t i = 0; i < n_push; i++) {
858  emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
859  emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
860  }
861  ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
862  adjust_stack(emit, n_push);
863 }
864 
865 STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
866  DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
867  emit_native_pre(emit);
868  // need to commit stack because we can jump here from elsewhere
869  need_stack_settled(emit);
870  mp_asm_base_label_assign(&emit->as->base, l);
871  emit_post(emit);
872 }
873 
874 STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
875  DEBUG_printf("import_name %s\n", qstr_str(qst));
876 
877  // get arguments from stack: arg2 = fromlist, arg3 = level
878  // if using viper types these arguments must be converted to proper objects
879  if (emit->do_viper_types) {
880  // fromlist should be None or a tuple
881  stack_info_t *top = peek_stack(emit, 0);
882  if (top->vtype == VTYPE_PTR_NONE) {
883  emit_pre_pop_discard(emit);
884  ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)mp_const_none, REG_ARG_2);
885  } else {
886  vtype_kind_t vtype_fromlist;
887  emit_pre_pop_reg(emit, &vtype_fromlist, REG_ARG_2);
888  assert(vtype_fromlist == VTYPE_PYOBJ);
889  }
890 
891  // level argument should be an immediate integer
892  top = peek_stack(emit, 0);
893  assert(top->vtype == VTYPE_INT && top->kind == STACK_IMM);
894  ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)MP_OBJ_NEW_SMALL_INT(top->data.u_imm), REG_ARG_3);
895  emit_pre_pop_discard(emit);
896 
897  } else {
898  vtype_kind_t vtype_fromlist;
899  vtype_kind_t vtype_level;
900  emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
901  assert(vtype_fromlist == VTYPE_PYOBJ);
902  assert(vtype_level == VTYPE_PYOBJ);
903  }
904 
905  emit_call_with_imm_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
906  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
907 }
908 
909 STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
910  DEBUG_printf("import_from %s\n", qstr_str(qst));
911  emit_native_pre(emit);
912  vtype_kind_t vtype_module;
913  emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
914  assert(vtype_module == VTYPE_PYOBJ);
915  emit_call_with_imm_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
916  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
917 }
918 
919 STATIC void emit_native_import_star(emit_t *emit) {
920  DEBUG_printf("import_star\n");
921  vtype_kind_t vtype_module;
922  emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
923  assert(vtype_module == VTYPE_PYOBJ);
924  emit_call(emit, MP_F_IMPORT_ALL);
925  emit_post(emit);
926 }
927 
928 STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
929  DEBUG_printf("load_const_tok(tok=%u)\n", tok);
930  emit_native_pre(emit);
931  vtype_kind_t vtype;
932  mp_uint_t val;
933  if (emit->do_viper_types) {
934  switch (tok) {
935  case MP_TOKEN_KW_NONE: vtype = VTYPE_PTR_NONE; val = 0; break;
936  case MP_TOKEN_KW_FALSE: vtype = VTYPE_BOOL; val = 0; break;
937  case MP_TOKEN_KW_TRUE: vtype = VTYPE_BOOL; val = 1; break;
938  default:
939  assert(tok == MP_TOKEN_ELLIPSIS);
940  vtype = VTYPE_PYOBJ; val = (mp_uint_t)&mp_const_ellipsis_obj; break;
941  }
942  } else {
943  vtype = VTYPE_PYOBJ;
944  switch (tok) {
945  case MP_TOKEN_KW_NONE: val = (mp_uint_t)mp_const_none; break;
946  case MP_TOKEN_KW_FALSE: val = (mp_uint_t)mp_const_false; break;
947  case MP_TOKEN_KW_TRUE: val = (mp_uint_t)mp_const_true; break;
948  default:
949  assert(tok == MP_TOKEN_ELLIPSIS);
950  val = (mp_uint_t)&mp_const_ellipsis_obj; break;
951  }
952  }
953  emit_post_push_imm(emit, vtype, val);
954 }
955 
956 STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
957  DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
958  emit_native_pre(emit);
959  if (emit->do_viper_types) {
960  emit_post_push_imm(emit, VTYPE_INT, arg);
961  } else {
962  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_NEW_SMALL_INT(arg));
963  }
964 }
965 
966 STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
967  emit_native_pre(emit);
968  // TODO: Eventually we want to be able to work with raw pointers in viper to
969  // do native array access. For now we just load them as any other object.
970  /*
971  if (emit->do_viper_types) {
972  // load a pointer to the asciiz string?
973  emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
974  } else
975  */
976  {
977  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
978  }
979 }
980 
981 STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
982  emit_native_pre(emit);
983  need_reg_single(emit, REG_RET, 0);
984  ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, (mp_uint_t)obj, REG_RET);
985  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
986 }
987 
988 STATIC void emit_native_load_null(emit_t *emit) {
989  emit_native_pre(emit);
990  emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
991 }
992 
993 STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
994  DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
995  vtype_kind_t vtype = emit->local_vtype[local_num];
996  if (vtype == VTYPE_UNBOUND) {
997  EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "local '%q' used before type known", qst);
998  }
999  emit_native_pre(emit);
1000  if (local_num == 0) {
1001  emit_post_push_reg(emit, vtype, REG_LOCAL_1);
1002  } else if (local_num == 1) {
1003  emit_post_push_reg(emit, vtype, REG_LOCAL_2);
1004  } else if (local_num == 2) {
1005  emit_post_push_reg(emit, vtype, REG_LOCAL_3);
1006  } else {
1007  need_reg_single(emit, REG_TEMP0, 0);
1008  if (emit->do_viper_types) {
1009  ASM_MOV_LOCAL_TO_REG(emit->as, local_num - REG_LOCAL_NUM, REG_TEMP0);
1010  } else {
1011  ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - local_num, REG_TEMP0);
1012  }
1013  emit_post_push_reg(emit, vtype, REG_TEMP0);
1014  }
1015 }
1016 
1017 STATIC void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1018  DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1019  need_reg_single(emit, REG_RET, 0);
1020  emit_native_load_fast(emit, qst, local_num);
1021  vtype_kind_t vtype;
1022  int reg_base = REG_RET;
1023  emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1024  ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1025  // closed over vars are always Python objects
1026  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1027 }
1028 
1029 STATIC void emit_native_load_name(emit_t *emit, qstr qst) {
1030  DEBUG_printf("load_name(%s)\n", qstr_str(qst));
1031  emit_native_pre(emit);
1032  emit_call_with_imm_arg(emit, MP_F_LOAD_NAME, qst, REG_ARG_1);
1033  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1034 }
1035 
1036 STATIC void emit_native_load_global(emit_t *emit, qstr qst) {
1037  DEBUG_printf("load_global(%s)\n", qstr_str(qst));
1038  emit_native_pre(emit);
1039  // check for builtin casting operators
1040  if (emit->do_viper_types && qst == MP_QSTR_int) {
1041  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_INT);
1042  } else if (emit->do_viper_types && qst == MP_QSTR_uint) {
1043  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_UINT);
1044  } else if (emit->do_viper_types && qst == MP_QSTR_ptr) {
1045  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR);
1046  } else if (emit->do_viper_types && qst == MP_QSTR_ptr8) {
1047  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR8);
1048  } else if (emit->do_viper_types && qst == MP_QSTR_ptr16) {
1049  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR16);
1050  } else if (emit->do_viper_types && qst == MP_QSTR_ptr32) {
1051  emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR32);
1052  } else {
1053  emit_call_with_imm_arg(emit, MP_F_LOAD_GLOBAL, qst, REG_ARG_1);
1054  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1055  }
1056 }
1057 
1058 STATIC void emit_native_load_attr(emit_t *emit, qstr qst) {
1059  // depends on type of subject:
1060  // - integer, function, pointer to integers: error
1061  // - pointer to structure: get member, quite easy
1062  // - Python object: call mp_load_attr, and needs to be typed to convert result
1063  vtype_kind_t vtype_base;
1064  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1065  assert(vtype_base == VTYPE_PYOBJ);
1066  emit_call_with_imm_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1067  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1068 }
1069 
1070 STATIC void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
1071  if (is_super) {
1072  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
1073  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
1074  emit_call_with_imm_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
1075  } else {
1076  vtype_kind_t vtype_base;
1077  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1078  assert(vtype_base == VTYPE_PYOBJ);
1079  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1080  emit_call_with_imm_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
1081  }
1082 }
1083 
1084 STATIC void emit_native_load_build_class(emit_t *emit) {
1085  emit_native_pre(emit);
1086  emit_call(emit, MP_F_LOAD_BUILD_CLASS);
1087  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1088 }
1089 
1090 STATIC void emit_native_load_subscr(emit_t *emit) {
1091  DEBUG_printf("load_subscr\n");
1092  // need to compile: base[index]
1093 
1094  // pop: index, base
1095  // optimise case where index is an immediate
1096  vtype_kind_t vtype_base = peek_vtype(emit, 1);
1097 
1098  if (vtype_base == VTYPE_PYOBJ) {
1099  // standard Python subscr
1100  // TODO factor this implicit cast code with other uses of it
1101  vtype_kind_t vtype_index = peek_vtype(emit, 0);
1102  if (vtype_index == VTYPE_PYOBJ) {
1103  emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1104  } else {
1105  emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1106  emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
1107  ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1108  }
1109  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1110  emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
1111  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1112  } else {
1113  // viper load
1114  // TODO The different machine architectures have very different
1115  // capabilities and requirements for loads, so probably best to
1116  // write a completely separate load-optimiser for each one.
1117  stack_info_t *top = peek_stack(emit, 0);
1118  if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1119  // index is an immediate
1120  mp_int_t index_value = top->data.u_imm;
1121  emit_pre_pop_discard(emit); // discard index
1122  int reg_base = REG_ARG_1;
1123  int reg_index = REG_ARG_2;
1124  emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_index);
1125  switch (vtype_base) {
1126  case VTYPE_PTR8: {
1127  // pointer to 8-bit memory
1128  // TODO optimise to use thumb ldrb r1, [r2, r3]
1129  if (index_value != 0) {
1130  // index is non-zero
1131  #if N_THUMB
1132  if (index_value > 0 && index_value < 32) {
1133  asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1134  break;
1135  }
1136  #endif
1137  ASM_MOV_IMM_TO_REG(emit->as, index_value, reg_index);
1138  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1139  reg_base = reg_index;
1140  }
1141  ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
1142  break;
1143  }
1144  case VTYPE_PTR16: {
1145  // pointer to 16-bit memory
1146  if (index_value != 0) {
1147  // index is a non-zero immediate
1148  #if N_THUMB
1149  if (index_value > 0 && index_value < 32) {
1150  asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1151  break;
1152  }
1153  #endif
1154  ASM_MOV_IMM_TO_REG(emit->as, index_value << 1, reg_index);
1155  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1156  reg_base = reg_index;
1157  }
1158  ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
1159  break;
1160  }
1161  case VTYPE_PTR32: {
1162  // pointer to 32-bit memory
1163  if (index_value != 0) {
1164  // index is a non-zero immediate
1165  #if N_THUMB
1166  if (index_value > 0 && index_value < 32) {
1167  asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1168  break;
1169  }
1170  #endif
1171  ASM_MOV_IMM_TO_REG(emit->as, index_value << 2, reg_index);
1172  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1173  reg_base = reg_index;
1174  }
1175  ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
1176  break;
1177  }
1178  default:
1179  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1180  "can't load from '%q'", vtype_to_qstr(vtype_base));
1181  }
1182  } else {
1183  // index is not an immediate
1184  vtype_kind_t vtype_index;
1185  int reg_index = REG_ARG_2;
1186  emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, REG_ARG_1);
1187  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1188  if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1189  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1190  "can't load with '%q' index", vtype_to_qstr(vtype_index));
1191  }
1192  switch (vtype_base) {
1193  case VTYPE_PTR8: {
1194  // pointer to 8-bit memory
1195  // TODO optimise to use thumb ldrb r1, [r2, r3]
1196  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1197  ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
1198  break;
1199  }
1200  case VTYPE_PTR16: {
1201  // pointer to 16-bit memory
1202  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1203  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1204  ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
1205  break;
1206  }
1207  case VTYPE_PTR32: {
1208  // pointer to 32-bit memory
1209  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1210  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1211  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1212  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1213  ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
1214  break;
1215  }
1216  default:
1217  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1218  "can't load from '%q'", vtype_to_qstr(vtype_base));
1219  }
1220  }
1221  emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1222  }
1223 }
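All of the viper load paths above compute `base + index * sizeof(element)`, either folding an immediate index into the address or adding the index register once, twice, or four times. The illustrative C equivalents (assuming byte-addressed memory, not emitter output) are just scaled array indexing:

```c
#include <stdint.h>

// Illustrative C equivalents of the viper ptr8/ptr16/ptr32 loads above;
// the C compiler scales the index by the element size automatically.
static uint8_t  load_ptr8(const uint8_t *base, intptr_t i)   { return base[i]; } // base + i
static uint16_t load_ptr16(const uint16_t *base, intptr_t i) { return base[i]; } // base + 2*i
static uint32_t load_ptr32(const uint32_t *base, intptr_t i) { return base[i]; } // base + 4*i
```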
1224 
1225 STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1226  vtype_kind_t vtype;
1227  if (local_num == 0) {
1228  emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
1229  } else if (local_num == 1) {
1230  emit_pre_pop_reg(emit, &vtype, REG_LOCAL_2);
1231  } else if (local_num == 2) {
1232  emit_pre_pop_reg(emit, &vtype, REG_LOCAL_3);
1233  } else {
1234  emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1235  if (emit->do_viper_types) {
1236  ASM_MOV_REG_TO_LOCAL(emit->as, REG_TEMP0, local_num - REG_LOCAL_NUM);
1237  } else {
1238  ASM_MOV_REG_TO_LOCAL(emit->as, REG_TEMP0, STATE_START + emit->n_state - 1 - local_num);
1239  }
1240  }
1241  emit_post(emit);
1242 
1243  // check types
1244  if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1245  // first time this local is assigned, so give it a type of the object stored in it
1246  emit->local_vtype[local_num] = vtype;
1247  } else if (emit->local_vtype[local_num] != vtype) {
1248  // type of local is not the same as object stored in it
1249  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1250  "local '%q' has type '%q' but source is '%q'",
1251  qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
1252  }
1253 }
1254 
1255 STATIC void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1256  DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1257  need_reg_single(emit, REG_TEMP0, 0);
1258  need_reg_single(emit, REG_TEMP1, 0);
1259  emit_native_load_fast(emit, qst, local_num);
1260  vtype_kind_t vtype;
1261  int reg_base = REG_TEMP0;
1262  emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1263  int reg_src = REG_TEMP1;
1264  emit_pre_pop_reg_flexible(emit, &vtype, &reg_src, reg_base, reg_base);
1265  ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
1266  emit_post(emit);
1267 }
1268 
1269 STATIC void emit_native_store_name(emit_t *emit, qstr qst) {
1270  // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
1271  vtype_kind_t vtype;
1272  emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1273  assert(vtype == VTYPE_PYOBJ);
1274  emit_call_with_imm_arg(emit, MP_F_STORE_NAME, qst, REG_ARG_1); // arg1 = name
1275  emit_post(emit);
1276 }
1277 
1278 STATIC void emit_native_store_global(emit_t *emit, qstr qst) {
1279  vtype_kind_t vtype = peek_vtype(emit, 0);
1280  if (vtype == VTYPE_PYOBJ) {
1281  emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1282  } else {
1283  emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1284  emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
1285  ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1286  }
1287  emit_call_with_imm_arg(emit, MP_F_STORE_GLOBAL, qst, REG_ARG_1); // arg1 = name
1288  emit_post(emit);
1289 }
1290 
1291 STATIC void emit_native_store_attr(emit_t *emit, qstr qst) {
1292  vtype_kind_t vtype_base, vtype_val;
1293  emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
1294  assert(vtype_base == VTYPE_PYOBJ);
1295  assert(vtype_val == VTYPE_PYOBJ);
1296  emit_call_with_imm_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1297  emit_post(emit);
1298 }
1299 
1300 STATIC void emit_native_store_subscr(emit_t *emit) {
1301  DEBUG_printf("store_subscr\n");
1302  // need to compile: base[index] = value
1303 
1304  // pop: index, base, value
1305  // optimise case where index is an immediate
1306  vtype_kind_t vtype_base = peek_vtype(emit, 1);
1307 
1308  if (vtype_base == VTYPE_PYOBJ) {
1309  // standard Python subscr
1310  vtype_kind_t vtype_index = peek_vtype(emit, 0);
1311  vtype_kind_t vtype_value = peek_vtype(emit, 2);
1312  if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
1313  // need to implicitly convert non-objects to objects
1314  // TODO do this properly
1315  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1316  adjust_stack(emit, 3);
1317  }
1318  emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
1319  emit_call(emit, MP_F_OBJ_SUBSCR);
1320  } else {
1321  // viper store
1322  // TODO The different machine architectures have very different
1323  // capabilities and requirements for stores, so probably best to
1324  // write a completely separate store-optimiser for each one.
1325  stack_info_t *top = peek_stack(emit, 0);
1326  if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1327  // index is an immediate
1328  mp_int_t index_value = top->data.u_imm;
1329  emit_pre_pop_discard(emit); // discard index
1330  vtype_kind_t vtype_value;
1331  int reg_base = REG_ARG_1;
1332  int reg_index = REG_ARG_2;
1333  int reg_value = REG_ARG_3;
1334  emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_value);
1335  #if N_X86
1336  // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1337  emit_pre_pop_reg(emit, &vtype_value, reg_value);
1338  #else
1339  emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, reg_base, reg_index);
1340  #endif
1341  if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1342  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1343  "can't store '%q'", vtype_to_qstr(vtype_value));
1344  }
1345  switch (vtype_base) {
1346  case VTYPE_PTR8: {
1347  // pointer to 8-bit memory
1348  // TODO optimise to use thumb strb r1, [r2, r3]
1349  if (index_value != 0) {
1350  // index is non-zero
1351  #if N_THUMB
1352  if (index_value > 0 && index_value < 32) {
1353  asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1354  break;
1355  }
1356  #endif
1357  ASM_MOV_IMM_TO_REG(emit->as, index_value, reg_index);
1358  #if N_ARM
1359  asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1360  return;
1361  #endif
1362  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1363  reg_base = reg_index;
1364  }
1365  ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
1366  break;
1367  }
1368  case VTYPE_PTR16: {
1369  // pointer to 16-bit memory
1370  if (index_value != 0) {
1371  // index is a non-zero immediate
1372  #if N_THUMB
1373  if (index_value > 0 && index_value < 32) {
1374  asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1375  break;
1376  }
1377  #endif
1378  ASM_MOV_IMM_TO_REG(emit->as, index_value << 1, reg_index);
1379  #if N_ARM
1380  asm_arm_strh_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1381  return;
1382  #endif
1383  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1384  reg_base = reg_index;
1385  }
1386  ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
1387  break;
1388  }
1389  case VTYPE_PTR32: {
1390  // pointer to 32-bit memory
1391  if (index_value != 0) {
1392  // index is a non-zero immediate
1393  #if N_THUMB
1394  if (index_value > 0 && index_value < 32) {
1395  asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1396  break;
1397  }
1398  #endif
1399  ASM_MOV_IMM_TO_REG(emit->as, index_value << 2, reg_index);
1400  #if N_ARM
1401  asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1402  return;
1403  #endif
1404  ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1405  reg_base = reg_index;
1406  }
1407  ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
1408  break;
1409  }
1410  default:
1411  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1412  "can't store to '%q'", vtype_to_qstr(vtype_base));
1413  }
1414  } else {
1415  // index is not an immediate
1416  vtype_kind_t vtype_index, vtype_value;
1417  int reg_index = REG_ARG_2;
1418  int reg_value = REG_ARG_3;
1419  emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, reg_value);
1420  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1421  if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1422  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1423  "can't store with '%q' index", vtype_to_qstr(vtype_index));
1424  }
1425  #if N_X86
1426  // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1427  emit_pre_pop_reg(emit, &vtype_value, reg_value);
1428  #else
1429  emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, REG_ARG_1, reg_index);
1430  #endif
1431  if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1432  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1433  "can't store '%q'", vtype_to_qstr(vtype_value));
1434  }
1435  switch (vtype_base) {
1436  case VTYPE_PTR8: {
1437  // pointer to 8-bit memory
1438  // TODO optimise to use thumb strb r1, [r2, r3]
1439  #if N_ARM
1440  asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1441  break;
1442  #endif
1443  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1444  ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
1445  break;
1446  }
1447  case VTYPE_PTR16: {
1448  // pointer to 16-bit memory
1449  #if N_ARM
1450  asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1451  break;
1452  #endif
1453  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1454  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1455  ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
1456  break;
1457  }
1458  case VTYPE_PTR32: {
1459  // pointer to 32-bit memory
1460  #if N_ARM
1461  asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1462  break;
1463  #endif
1464  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1465  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1466  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1467  ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1468  ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
1469  break;
1470  }
1471  default:
1472  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1473  "can't store to '%q'", vtype_to_qstr(vtype_base));
1474  }
1475  }
1476 
1477  }
1478 }
1479 
1480 STATIC void emit_native_delete_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1481  // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
1482  // to mark deleted vars but then every var would need to be checked on
1483  // each access. Very inefficient, so just set value to None to enable GC.
1484  emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
1485  emit_native_store_fast(emit, qst, local_num);
1486 }
1487 
1488 STATIC void emit_native_delete_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1489  // TODO implement me!
1490  (void)emit;
1491  (void)qst;
1492  (void)local_num;
1493 }
1494 
1495 STATIC void emit_native_delete_name(emit_t *emit, qstr qst) {
1496  emit_native_pre(emit);
1497  emit_call_with_imm_arg(emit, MP_F_DELETE_NAME, qst, REG_ARG_1);
1498  emit_post(emit);
1499 }
1500 
1501 STATIC void emit_native_delete_global(emit_t *emit, qstr qst) {
1502  emit_native_pre(emit);
1503  emit_call_with_imm_arg(emit, MP_F_DELETE_GLOBAL, qst, REG_ARG_1);
1504  emit_post(emit);
1505 }
1506 
1507 STATIC void emit_native_delete_attr(emit_t *emit, qstr qst) {
1508  vtype_kind_t vtype_base;
1509  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1510  assert(vtype_base == VTYPE_PYOBJ);
1511  emit_call_with_2_imm_args(emit, MP_F_STORE_ATTR, qst, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3); // arg2 = attribute name, arg3 = value (null for delete)
1512  emit_post(emit);
1513 }
1514 
1515 STATIC void emit_native_delete_subscr(emit_t *emit) {
1516  vtype_kind_t vtype_index, vtype_base;
1517  emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
1518  assert(vtype_index == VTYPE_PYOBJ);
1519  assert(vtype_base == VTYPE_PYOBJ);
1520  emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
1521 }
1522 
1523 STATIC void emit_native_dup_top(emit_t *emit) {
1524  DEBUG_printf("dup_top\n");
1525  vtype_kind_t vtype;
1526  int reg = REG_TEMP0;
1527  emit_pre_pop_reg_flexible(emit, &vtype, &reg, -1, -1);
1528  emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
1529 }
1530 
1531 STATIC void emit_native_dup_top_two(emit_t *emit) {
1532  vtype_kind_t vtype0, vtype1;
1533  emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1534  emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
1535 }
1536 
1537 STATIC void emit_native_pop_top(emit_t *emit) {
1538  DEBUG_printf("pop_top\n");
1539  emit_pre_pop_discard(emit);
1540  emit_post(emit);
1541 }
1542 
1543 STATIC void emit_native_rot_two(emit_t *emit) {
1544  DEBUG_printf("rot_two\n");
1545  vtype_kind_t vtype0, vtype1;
1546  emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1547  emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
1548 }
1549 
1550 STATIC void emit_native_rot_three(emit_t *emit) {
1551  DEBUG_printf("rot_three\n");
1552  vtype_kind_t vtype0, vtype1, vtype2;
1553  emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
1554  emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
1555 }
1556 
1557 STATIC void emit_native_jump(emit_t *emit, mp_uint_t label) {
1558  DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
1559  emit_native_pre(emit);
1560  // need to commit stack because we are jumping elsewhere
1561  need_stack_settled(emit);
1562  ASM_JUMP(emit->as, label);
1563  emit_post(emit);
1564 }
1565 
1566 STATIC void emit_native_jump_helper(emit_t *emit, bool pop) {
1567  vtype_kind_t vtype = peek_vtype(emit, 0);
1568  if (vtype == VTYPE_PYOBJ) {
1569  emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1570  if (!pop) {
1571  adjust_stack(emit, 1);
1572  }
1573  emit_call(emit, MP_F_OBJ_IS_TRUE);
1574  } else {
1575  emit_pre_pop_reg(emit, &vtype, REG_RET);
1576  if (!pop) {
1577  adjust_stack(emit, 1);
1578  }
1579  if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
1580  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1581  "can't implicitly convert '%q' to 'bool'", vtype_to_qstr(vtype));
1582  }
1583  }
1584  // For non-pop need to save the vtype so that emit_native_adjust_stack_size
1585  // can use it. This is a bit of a hack.
1586  if (!pop) {
1587  emit->saved_stack_vtype = vtype;
1588  }
1589  // need to commit stack because we may jump elsewhere
1590  need_stack_settled(emit);
1591 }
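// Sketch of what the helper above accepts as a branch condition (hypothetical
// viper example): a Python object is tested via MP_F_OBJ_IS_TRUE, while
// bool/int/uint values are tested directly in a register; any other vtype
// triggers the "can't implicitly convert ... to 'bool'" error above.
//
//     @micropython.viper
//     def countdown(n: int):
//         while n:        # native non-zero test, no runtime call
//             n -= 1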
1592 
1593 STATIC void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
1594  DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
1595  emit_native_jump_helper(emit, true);
1596  if (cond) {
1597  ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1598  } else {
1599  ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label);
1600  }
1601  emit_post(emit);
1602 }
1603 
1604 STATIC void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
1605  DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
1606  emit_native_jump_helper(emit, false);
1607  if (cond) {
1608  ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1609  } else {
1610  ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label);
1611  }
1612  adjust_stack(emit, -1);
1613  emit_post(emit);
1614 }
1615 
1616 STATIC void emit_native_break_loop(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
1617  (void)except_depth;
1618  emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR); // TODO properly
1619 }
1620 
1621 STATIC void emit_native_continue_loop(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
1622  (void)except_depth;
1623  emit_native_jump(emit, label); // TODO properly
1624 }
1625 
1626 STATIC void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
1627  // the context manager is on the top of the stack
1628  // stack: (..., ctx_mgr)
1629 
1630  // get __exit__ method
1631  vtype_kind_t vtype;
1632  emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
1633  assert(vtype == VTYPE_PYOBJ);
1634  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1635  emit_call_with_imm_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
1636  // stack: (..., ctx_mgr, __exit__, self)
1637 
1638  emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
1639  emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
1640  emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
1641  emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
1642  emit_post_push_reg(emit, vtype, REG_ARG_3); // self
1643  // stack: (..., __exit__, self)
1644  // REG_ARG_1=ctx_mgr
1645 
1646  // get __enter__ method
1647  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1648  emit_call_with_imm_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
1649  // stack: (..., __exit__, self, __enter__, self)
1650 
1651  // call __enter__ method
1652  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
1653  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
1654  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
1655  // stack: (..., __exit__, self, as_value)
1656 
1657  // need to commit stack because we may jump elsewhere
1658  need_stack_settled(emit);
1659  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(mp_uint_t)); // arg1 = pointer to nlr buf
1660  emit_call(emit, MP_F_NLR_PUSH);
1661  ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1662 
1663  emit_access_stack(emit, sizeof(nlr_buf_t) / sizeof(mp_uint_t) + 1, &vtype, REG_RET); // access return value of __enter__
1664  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
1665  // stack: (..., __exit__, self, as_value, nlr_buf, as_value)
1666 }
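// For orientation, the runtime protocol established above, written out as
// ordinary Python (hypothetical sketch):
//
//     exit = ctx_mgr.__exit__      # looked up first and kept on the stack
//     as_value = ctx_mgr.__enter__()
//     # nlr_push then arms the native equivalent of a try/except so that
//     # the cleanup path below runs whether or not the body raises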
1667 
1668 STATIC void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
1669  // note: label+1 is available as an auxiliary label
1670 
1671  // stack: (..., __exit__, self, as_value, nlr_buf)
1672  emit_native_pre(emit);
1673  emit_call(emit, MP_F_NLR_POP);
1674  adjust_stack(emit, -(mp_int_t)(sizeof(nlr_buf_t) / sizeof(mp_uint_t)) - 1);
1675  // stack: (..., __exit__, self)
1676 
1677  // call __exit__
1678  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)mp_const_none);
1679  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)mp_const_none);
1680  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)mp_const_none);
1681  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
1682  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
1683 
1684  // jump to after with cleanup nlr_catch block
1685  adjust_stack(emit, 1); // dummy nlr_buf.prev
1686  emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE); // nlr_buf.ret_val = no exception
1687  emit_native_jump(emit, label + 1);
1688 
1689  // nlr_catch
1690  emit_native_label_assign(emit, label);
1691 
1692  // adjust stack counter for: __exit__, self, as_value
1693  adjust_stack(emit, 3);
1694  // stack: (..., __exit__, self, as_value, nlr_buf.prev, nlr_buf.ret_val)
1695 
1696  vtype_kind_t vtype;
1697  emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // get the thrown value (exc)
1698  adjust_stack(emit, -2); // discard nlr_buf.prev and as_value
1699  // stack: (..., __exit__, self)
1700  // REG_ARG_1=exc
1701 
1702  emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // self
1703  emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // __exit__
1704  adjust_stack(emit, 1); // dummy nlr_buf.prev
1705  emit_post_push_reg(emit, vtype, REG_ARG_1); // push exc to save it for later
1706  emit_post_push_reg(emit, vtype, REG_ARG_3); // __exit__
1707  emit_post_push_reg(emit, vtype, REG_ARG_2); // self
1708  // stack: (..., exc, __exit__, self)
1709  // REG_ARG_1=exc
1710 
1711  ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
1712  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
1713  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
1714  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)mp_const_none); // traceback info
1715  // stack: (..., exc, __exit__, self, type(exc), exc, traceback)
1716 
1717  // call __exit__ method
1718  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
1719  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
1720  // stack: (..., exc)
1721 
1722  // if REG_RET is true then we need to replace top-of-stack with None (swallow exception)
1723  if (REG_ARG_1 != REG_RET) {
1724  ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
1725  }
1726  emit_call(emit, MP_F_OBJ_IS_TRUE);
1727  ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label + 1);
1728 
1729  // replace exc with None
1730  emit_pre_pop_discard(emit);
1731  emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)mp_const_none);
1732 
1733  // end of with cleanup nlr_catch block
1734  emit_native_label_assign(emit, label + 1);
1735 }
1736 
1737 STATIC void emit_native_setup_except(emit_t *emit, mp_uint_t label) {
1738  emit_native_pre(emit);
1739  // need to commit stack because we may jump elsewhere
1740  need_stack_settled(emit);
1741  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(mp_uint_t)); // arg1 = pointer to nlr buf
1742  emit_call(emit, MP_F_NLR_PUSH);
1743  ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1744  emit_post(emit);
1745 }
1746 
1747 STATIC void emit_native_setup_finally(emit_t *emit, mp_uint_t label) {
1748  emit_native_setup_except(emit, label);
1749 }
1750 
1751 STATIC void emit_native_end_finally(emit_t *emit) {
1752  // logic:
1753  // exc = pop_stack
1754  // if exc == None: pass
1755  // else: raise exc
1756  // the check if exc is None is done in the MP_F_NATIVE_RAISE stub
1757  vtype_kind_t vtype;
1758  emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // get nlr_buf.ret_val
1759  emit_pre_pop_discard(emit); // discard nlr_buf.prev
1760  emit_call(emit, MP_F_NATIVE_RAISE);
1761  emit_post(emit);
1762 }
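// Effect of the finally handling above at the Python level (hypothetical
// sketch): MP_F_NATIVE_RAISE re-raises nlr_buf.ret_val unless it is None.
//
//     try:
//         body()
//     finally:
//         cleanup()    # end_finally then re-raises any pending exception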
1763 
1764 STATIC void emit_native_get_iter(emit_t *emit, bool use_stack) {
1765  // perhaps the difficult one, as we want to rewrite for loops using native code;
1766  // in cases where we iterate over a Python object, can we use normal runtime calls?
1767 
1768  vtype_kind_t vtype;
1769  emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1770  assert(vtype == VTYPE_PYOBJ);
1771  if (use_stack) {
1772  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
1773  emit_call(emit, MP_F_NATIVE_GETITER);
1774  } else {
1775  // mp_getiter will allocate the iter_buf on the heap
1776  ASM_MOV_IMM_TO_REG(emit->as, 0, REG_ARG_2);
1777  emit_call(emit, MP_F_NATIVE_GETITER);
1778  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1779  }
1780 }
1781 
1782 STATIC void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
1783  emit_native_pre(emit);
1784  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
1785  adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
1786  emit_call(emit, MP_F_NATIVE_ITERNEXT);
1787  ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)MP_OBJ_STOP_ITERATION, REG_TEMP1);
1788  ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
1789  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1790 }
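// Sketch of the loop shape produced by get_iter/for_iter above (hypothetical
// example): MP_OBJ_ITER_BUF_NSLOTS stack slots hold the iterator state, so
// small built-in iterators avoid a heap allocation.
//
//     for x in seq:     # get_iter: MP_F_NATIVE_GETITER(seq, iter_buf)
//         use(x)        # for_iter: MP_F_NATIVE_ITERNEXT until the result
//                       # equals MP_OBJ_STOP_ITERATION, which exits the loop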
1791 
1792 STATIC void emit_native_for_iter_end(emit_t *emit) {
1793  // adjust stack counter (we get here from for_iter ending, which popped the value for us)
1794  emit_native_pre(emit);
1795  adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
1796  emit_post(emit);
1797 }
1798 
1799 STATIC void emit_native_pop_block(emit_t *emit) {
1800  emit_native_pre(emit);
1801  emit_call(emit, MP_F_NLR_POP);
1802  adjust_stack(emit, -(mp_int_t)(sizeof(nlr_buf_t) / sizeof(mp_uint_t)) + 1);
1803  emit_post(emit);
1804 }
1805 
1806 STATIC void emit_native_pop_except(emit_t *emit) {
1807  (void)emit;
1808 }
1809 
1810 STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
1811  vtype_kind_t vtype;
1812  emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1813  if (vtype == VTYPE_PYOBJ) {
1814  emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
1815  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1816  } else {
1817  adjust_stack(emit, 1);
1818  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1819  "unary op %q not implemented", mp_unary_op_method_name[op]);
1820  }
1821 }
1822 
1823 STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
1824  DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
1825  vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
1826  vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
1827  if (vtype_lhs == VTYPE_INT && vtype_rhs == VTYPE_INT) {
1828  // for integers, inplace and normal ops are equivalent, so use just normal ops
1829  if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
1830  op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
1831  }
1832 
1833  #if N_X64 || N_X86
1834  // special cases for x64/x86 shifts: the variable shift count must be in RCX/ECX
1835  if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
1836  #if N_X64
1837  emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
1838  #else
1839  emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
1840  #endif
1841  if (op == MP_BINARY_OP_LSHIFT) {
1842  ASM_LSL_REG(emit->as, REG_RET);
1843  } else {
1844  ASM_ASR_REG(emit->as, REG_RET);
1845  }
1846  emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1847  return;
1848  }
1849  #endif
1850 
1851  // special cases for floor-divide and modulo because we dispatch to helper functions
1852  if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
1853  emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
1854  if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
1855  emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
1856  } else {
1857  emit_call(emit, MP_F_SMALL_INT_MODULO);
1858  }
1859  emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1860  return;
1861  }
1862 
1863  int reg_rhs = REG_ARG_3;
1864  emit_pre_pop_reg_flexible(emit, &vtype_rhs, &reg_rhs, REG_RET, REG_ARG_2);
1865  emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
1866  if (0) {
1867  // dummy
1868  #if !(N_X64 || N_X86)
1869  } else if (op == MP_BINARY_OP_LSHIFT) {
1870  ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1871  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1872  } else if (op == MP_BINARY_OP_RSHIFT) {
1873  ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1874  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1875  #endif
1876  } else if (op == MP_BINARY_OP_OR) {
1877  ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1878  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1879  } else if (op == MP_BINARY_OP_XOR) {
1880  ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1881  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1882  } else if (op == MP_BINARY_OP_AND) {
1883  ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1884  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1885  } else if (op == MP_BINARY_OP_ADD) {
1886  ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1887  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1888  } else if (op == MP_BINARY_OP_SUBTRACT) {
1889  ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1890  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1891  } else if (op == MP_BINARY_OP_MULTIPLY) {
1892  ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1893  emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1894  } else if (MP_BINARY_OP_LESS <= op && op <= MP_BINARY_OP_NOT_EQUAL) {
1895  // comparison ops are (in enum order):
1896  // MP_BINARY_OP_LESS
1897  // MP_BINARY_OP_MORE
1898  // MP_BINARY_OP_EQUAL
1899  // MP_BINARY_OP_LESS_EQUAL
1900  // MP_BINARY_OP_MORE_EQUAL
1901  // MP_BINARY_OP_NOT_EQUAL
1902  need_reg_single(emit, REG_RET, 0);
1903  #if N_X64
1904  asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
1905  asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
1906  static byte ops[6] = {
1907  ASM_X64_CC_JL,
1908  ASM_X64_CC_JG,
1909  ASM_X64_CC_JE,
1910  ASM_X64_CC_JLE,
1911  ASM_X64_CC_JGE,
1912  ASM_X64_CC_JNE,
1913  };
1914  asm_x64_setcc_r8(emit->as, ops[op - MP_BINARY_OP_LESS], REG_RET);
1915  #elif N_X86
1916  asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
1917  asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
1918  static byte ops[6] = {
1919  ASM_X86_CC_JL,
1920  ASM_X86_CC_JG,
1921  ASM_X86_CC_JE,
1922  ASM_X86_CC_JLE,
1923  ASM_X86_CC_JGE,
1924  ASM_X86_CC_JNE,
1925  };
1926  asm_x86_setcc_r8(emit->as, ops[op - MP_BINARY_OP_LESS], REG_RET);
1927  #elif N_THUMB
1928  asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
1929  static uint16_t ops[6] = {
1930  ASM_THUMB_OP_ITE_GE,
1931  ASM_THUMB_OP_ITE_GT,
1932  ASM_THUMB_OP_ITE_EQ,
1933  ASM_THUMB_OP_ITE_GT,
1934  ASM_THUMB_OP_ITE_GE,
1935  ASM_THUMB_OP_ITE_EQ,
1936  };
1937  static byte ret[6] = { 0, 1, 1, 0, 1, 0, };
1938  asm_thumb_op16(emit->as, ops[op - MP_BINARY_OP_LESS]);
1939  asm_thumb_mov_rlo_i8(emit->as, REG_RET, ret[op - MP_BINARY_OP_LESS]);
1940  asm_thumb_mov_rlo_i8(emit->as, REG_RET, ret[op - MP_BINARY_OP_LESS] ^ 1);
1941  #elif N_ARM
1942  asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
1943  static uint ccs[6] = {
1944  ASM_ARM_CC_LT,
1945  ASM_ARM_CC_GT,
1946  ASM_ARM_CC_EQ,
1947  ASM_ARM_CC_LE,
1948  ASM_ARM_CC_GE,
1949  ASM_ARM_CC_NE,
1950  };
1951  asm_arm_setcc_reg(emit->as, REG_RET, ccs[op - MP_BINARY_OP_LESS]);
1952  #elif N_XTENSA
1953  static uint8_t ccs[6] = {
1954  ASM_XTENSA_CC_LT,
1955  0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
1956  ASM_XTENSA_CC_EQ,
1957  0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
1958  ASM_XTENSA_CC_GE,
1959  ASM_XTENSA_CC_NE,
1960  };
1961  uint8_t cc = ccs[op - MP_BINARY_OP_LESS];
1962  if ((cc & 0x80) == 0) {
1963  asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
1964  } else {
1965  asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
1966  }
1967  #else
1968  #error not implemented
1969  #endif
1970  emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
1971  } else {
1972  // TODO other ops not yet implemented
1973  adjust_stack(emit, 1);
1974  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1975  "binary op %q not implemented", mp_binary_op_method_name[op]);
1976  }
1977  } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
1978  emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
1979  bool invert = false;
1980  if (op == MP_BINARY_OP_NOT_IN) {
1981  invert = true;
1982  op = MP_BINARY_OP_IN;
1983  } else if (op == MP_BINARY_OP_IS_NOT) {
1984  invert = true;
1985  op = MP_BINARY_OP_IS;
1986  }
1987  emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
1988  if (invert) {
1989  ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1990  emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
1991  }
1992  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1993  } else {
1994  adjust_stack(emit, -1);
1995  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1996  "can't do binary op between '%q' and '%q'",
1997  vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
1998  }
1999 }
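// Hypothetical viper example of the typed paths above: int/int operations
// stay in registers, comparisons push a VTYPE_BOOL, and mixing a Python
// object with a native int falls through to the "can't do binary op" error.
//
//     @micropython.viper
//     def clamp(x: int, hi: int) -> int:
//         if x > hi:      # native cmp + setcc, no runtime call
//             return hi
//         return x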
2000 
2001 STATIC void emit_native_build_tuple(emit_t *emit, mp_uint_t n_args) {
2002  // for viper: call runtime, with types of args
2003  // if wrapped in byte_array, or something, allocates memory and fills it
2004  emit_native_pre(emit);
2005  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2006  emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE, n_args, REG_ARG_1);
2007  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple
2008 }
2009 
2010 STATIC void emit_native_build_list(emit_t *emit, mp_uint_t n_args) {
2011  emit_native_pre(emit);
2012  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2013  emit_call_with_imm_arg(emit, MP_F_BUILD_LIST, n_args, REG_ARG_1);
2014  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new list
2015 }
2016 
2017 STATIC void emit_native_build_map(emit_t *emit, mp_uint_t n_args) {
2018  emit_native_pre(emit);
2019  emit_call_with_imm_arg(emit, MP_F_BUILD_MAP, n_args, REG_ARG_1);
2020  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new map
2021 }
2022 
2023 STATIC void emit_native_store_map(emit_t *emit) {
2024  vtype_kind_t vtype_key, vtype_value, vtype_map;
2025  emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
2026  assert(vtype_key == VTYPE_PYOBJ);
2027  assert(vtype_value == VTYPE_PYOBJ);
2028  assert(vtype_map == VTYPE_PYOBJ);
2029  emit_call(emit, MP_F_STORE_MAP);
2030  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
2031 }
2032 
2033 #if MICROPY_PY_BUILTINS_SET
2034 STATIC void emit_native_build_set(emit_t *emit, mp_uint_t n_args) {
2035  emit_native_pre(emit);
2036  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2037  emit_call_with_imm_arg(emit, MP_F_BUILD_SET, n_args, REG_ARG_1);
2038  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new set
2039 }
2040 #endif
2041 
2042 #if MICROPY_PY_BUILTINS_SLICE
2043 STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
2044  DEBUG_printf("build_slice %d\n", n_args);
2045  if (n_args == 2) {
2046  vtype_kind_t vtype_start, vtype_stop;
2047  emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
2048  assert(vtype_start == VTYPE_PYOBJ);
2049  assert(vtype_stop == VTYPE_PYOBJ);
2050  emit_call_with_imm_arg(emit, MP_F_NEW_SLICE, (mp_uint_t)mp_const_none, REG_ARG_3); // arg3 = step
2051  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2052  } else {
2053  assert(n_args == 3);
2054  vtype_kind_t vtype_start, vtype_stop, vtype_step;
2055  emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
2056  assert(vtype_start == VTYPE_PYOBJ);
2057  assert(vtype_stop == VTYPE_PYOBJ);
2058  assert(vtype_step == VTYPE_PYOBJ);
2059  emit_call(emit, MP_F_NEW_SLICE);
2060  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2061  }
2062 }
2063 #endif
2064 
2065 STATIC void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
2066  mp_fun_kind_t f;
2067  if (kind == SCOPE_LIST_COMP) {
2068  vtype_kind_t vtype_item;
2069  emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2070  assert(vtype_item == VTYPE_PYOBJ);
2071  f = MP_F_LIST_APPEND;
2072  #if MICROPY_PY_BUILTINS_SET
2073  } else if (kind == SCOPE_SET_COMP) {
2074  vtype_kind_t vtype_item;
2075  emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2076  assert(vtype_item == VTYPE_PYOBJ);
2077  f = MP_F_STORE_SET;
2078  #endif
2079  } else {
2080  // SCOPE_DICT_COMP
2081  vtype_kind_t vtype_key, vtype_value;
2082  emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2083  assert(vtype_key == VTYPE_PYOBJ);
2084  assert(vtype_value == VTYPE_PYOBJ);
2085  f = MP_F_STORE_MAP;
2086  }
2087  vtype_kind_t vtype_collection;
2088  emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2089  assert(vtype_collection == VTYPE_PYOBJ);
2090  emit_call(emit, f);
2091  emit_post(emit);
2092 }
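// The comprehension kinds handled above map to runtime helpers as follows
// (hypothetical examples):
//
//     [x * x for x in it]      # MP_F_LIST_APPEND per item
//     {x for x in it}          # MP_F_STORE_SET per item (if sets are enabled)
//     {k: v for k, v in it}    # MP_F_STORE_MAP per item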
2093 
2094 STATIC void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
2095  DEBUG_printf("unpack_sequence %d\n", n_args);
2096  vtype_kind_t vtype_base;
2097  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2098  assert(vtype_base == VTYPE_PYOBJ);
2099  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
2100  emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
2101 }
2102 
2103 STATIC void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
2104  DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
2105  vtype_kind_t vtype_base;
2106  emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2107  assert(vtype_base == VTYPE_PYOBJ);
2108  emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
2109  emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
2110 }
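// Worked example of the argument packing above (hypothetical): for
// "a, *b, c = seq", n_left = 1 and n_right = 1, so MP_F_UNPACK_EX receives
// 1 | (1 << 8) = 257 and fills n_left + n_right + 1 = 3 destination slots,
// the extra slot holding the list bound to *b.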
2111 
2112 STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2113  // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
2114  emit_native_pre(emit);
2115  if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2116  emit_call_with_3_imm_args_and_first_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, (mp_uint_t)scope->raw_code, REG_ARG_1, (mp_uint_t)MP_OBJ_NULL, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
2117  } else {
2118  vtype_kind_t vtype_def_tuple, vtype_def_dict;
2119  emit_pre_pop_reg_reg(emit, &vtype_def_dict, REG_ARG_3, &vtype_def_tuple, REG_ARG_2);
2120  assert(vtype_def_tuple == VTYPE_PYOBJ);
2121  assert(vtype_def_dict == VTYPE_PYOBJ);
2122  emit_call_with_imm_arg_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, (mp_uint_t)scope->raw_code, REG_ARG_1);
2123  }
2124  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2125 }
2126 
2127 STATIC void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2128  emit_native_pre(emit);
2129  if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2130  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2131  ASM_MOV_IMM_TO_REG(emit->as, n_closed_over, REG_ARG_2);
2132  } else {
2133  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over + 2);
2134  ASM_MOV_IMM_TO_REG(emit->as, 0x100 | n_closed_over, REG_ARG_2);
2135  }
2136  ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, (mp_uint_t)scope->raw_code, REG_ARG_1);
2137  emit_call(emit, MP_F_MAKE_CLOSURE_FROM_RAW_CODE);
2138  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2139 }
2140 
2141 STATIC void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2142  DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);
2143 
2144  // TODO: in viper mode, call special runtime routine with type info for args,
2145  // and wanted type info for return, to remove need for boxing/unboxing
2146 
2147  emit_native_pre(emit);
2148  vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2149  if (vtype_fun == VTYPE_BUILTIN_CAST) {
2150  // casting operator
2151  assert(n_positional == 1 && n_keyword == 0);
2152  assert(!star_flags);
2153  DEBUG_printf(" cast to %d\n", vtype_fun);
2154  vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
2155  switch (peek_vtype(emit, 0)) {
2156  case VTYPE_PYOBJ: {
2157  vtype_kind_t vtype;
2158  emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2159  emit_pre_pop_discard(emit);
2160  emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
2161  emit_post_push_reg(emit, vtype_cast, REG_RET);
2162  break;
2163  }
2164  case VTYPE_BOOL:
2165  case VTYPE_INT:
2166  case VTYPE_UINT:
2167  case VTYPE_PTR:
2168  case VTYPE_PTR8:
2169  case VTYPE_PTR16:
2170  case VTYPE_PTR32:
2171  case VTYPE_PTR_NONE:
2172  emit_fold_stack_top(emit, REG_ARG_1);
2173  emit_post_top_set_vtype(emit, vtype_cast);
2174  break;
2175  default:
2176  // this can happen when casting a cast: int(int)
2177  mp_raise_NotImplementedError("casting");
2178  }
2179  } else {
2180  assert(vtype_fun == VTYPE_PYOBJ);
2181  if (star_flags) {
2182  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
2183  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2184  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2185  } else {
2186  if (n_positional != 0 || n_keyword != 0) {
2187  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
2188  }
2189  emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
2190  emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
2191  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2192  }
2193  }
2194 }
2195 
2196 STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2197  if (star_flags) {
2198  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 4); // pointer to args
2199  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2200  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2201  } else {
2202  emit_native_pre(emit);
2203  emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
2204  emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2205  emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2206  }
2207 }
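// Stack layout assumed by the non-star path above (hypothetical example):
// for obj.meth(a, b) the emitter passes a pointer to the 2 + n_positional
// + 2*n_keyword items [meth, self, a, b], so MP_F_CALL_METHOD_N_KW can
// consume the method and its arguments in one block.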
2208 
2209 STATIC void emit_native_return_value(emit_t *emit) {
2210  DEBUG_printf("return_value\n");
2211  if (emit->do_viper_types) {
2212  if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2213  emit_pre_pop_discard(emit);
2214  if (emit->return_vtype == VTYPE_PYOBJ) {
2215  ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)mp_const_none, REG_RET);
2216  } else {
2217  ASM_MOV_IMM_TO_REG(emit->as, 0, REG_RET);
2218  }
2219  } else {
2220  vtype_kind_t vtype;
2221  emit_pre_pop_reg(emit, &vtype, REG_RET);
2222  if (vtype != emit->return_vtype) {
2223  EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2224  "return expected '%q' but got '%q'",
2225  vtype_to_qstr(emit->return_vtype), vtype_to_qstr(vtype));
2226  }
2227  }
2228  } else {
2229  vtype_kind_t vtype;
2230  emit_pre_pop_reg(emit, &vtype, REG_RET);
2231  assert(vtype == VTYPE_PYOBJ);
2232  }
2233  emit->last_emit_was_return_value = true;
2234  ASM_EXIT(emit->as);
2235 }
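// Hypothetical viper examples of the return-type check above:
//
//     @micropython.viper
//     def ok() -> int:
//         return 42      # vtype matches emit->return_vtype
//
//     @micropython.viper
//     def bad() -> int:
//         return []      # VTYPE_PYOBJ != VTYPE_INT -> "return expected
//                        # 'int' but got 'object'"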
2236 
2237 STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
2238  assert(n_args == 1);
2239  vtype_kind_t vtype_exc;
2240  emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
2241  if (vtype_exc != VTYPE_PYOBJ) {
2242  EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "must raise an object");
2243  }
2244  // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
2245  emit_call(emit, MP_F_NATIVE_RAISE);
2246 }
2247 
2248 STATIC void emit_native_yield_value(emit_t *emit) {
2249  // not supported (for now)
2250  (void)emit;
2251  mp_raise_NotImplementedError("native yield");
2252 }
2253 STATIC void emit_native_yield_from(emit_t *emit) {
2254  // not supported (for now)
2255  (void)emit;
2256  mp_raise_NotImplementedError("native yield from");
2257 }
2258 
2259 STATIC void emit_native_start_except_handler(emit_t *emit) {
2260  // This instruction follows an nlr_pop, so the stack counter is back to zero, when really
2261  // it should be up by a whole nlr_buf_t. We then want to pop the nlr_buf_t here, but save
2262  // the first 2 elements, so we can get the thrown value.
2263  adjust_stack(emit, 1);
2264  vtype_kind_t vtype_nlr;
2265  emit_pre_pop_reg(emit, &vtype_nlr, REG_ARG_1); // get the thrown value
2266  emit_pre_pop_discard(emit); // discard the linked-list pointer in the nlr_buf
2267  emit_post_push_reg_reg_reg(emit, VTYPE_PYOBJ, REG_ARG_1, VTYPE_PYOBJ, REG_ARG_1, VTYPE_PYOBJ, REG_ARG_1); // push the 3 exception items
2268 }
2269 
2270 STATIC void emit_native_end_except_handler(emit_t *emit) {
2271  adjust_stack(emit, -1);
2272 }
2273 
2274 const emit_method_table_t EXPORT_FUN(method_table) = {
2275  emit_native_set_native_type,
2276  emit_native_start_pass,
2277  emit_native_end_pass,
2278  emit_native_last_emit_was_return_value,
2279  emit_native_adjust_stack_size,
2280  emit_native_set_source_line,
2281 
2282  {
2283  emit_native_load_fast,
2284  emit_native_load_deref,
2285  emit_native_load_name,
2286  emit_native_load_global,
2287  },
2288  {
2289  emit_native_store_fast,
2290  emit_native_store_deref,
2291  emit_native_store_name,
2292  emit_native_store_global,
2293  },
2294  {
2295  emit_native_delete_fast,
2296  emit_native_delete_deref,
2297  emit_native_delete_name,
2298  emit_native_delete_global,
2299  },
2300 
2301  emit_native_label_assign,
2302  emit_native_import_name,
2303  emit_native_import_from,
2304  emit_native_import_star,
2305  emit_native_load_const_tok,
2306  emit_native_load_const_small_int,
2307  emit_native_load_const_str,
2308  emit_native_load_const_obj,
2309  emit_native_load_null,
2310  emit_native_load_attr,
2311  emit_native_load_method,
2312  emit_native_load_build_class,
2313  emit_native_load_subscr,
2314  emit_native_store_attr,
2315  emit_native_store_subscr,
2316  emit_native_delete_attr,
2317  emit_native_delete_subscr,
2318  emit_native_dup_top,
2319  emit_native_dup_top_two,
2320  emit_native_pop_top,
2321  emit_native_rot_two,
2322  emit_native_rot_three,
2323  emit_native_jump,
2324  emit_native_pop_jump_if,
2325  emit_native_jump_if_or_pop,
2326  emit_native_break_loop,
2327  emit_native_continue_loop,
2328  emit_native_setup_with,
2329  emit_native_with_cleanup,
2330  emit_native_setup_except,
2331  emit_native_setup_finally,
2332  emit_native_end_finally,
2333  emit_native_get_iter,
2334  emit_native_for_iter,
2335  emit_native_for_iter_end,
2336  emit_native_pop_block,
2337  emit_native_pop_except,
2338  emit_native_unary_op,
2339  emit_native_binary_op,
2340  emit_native_build_tuple,
2341  emit_native_build_list,
2342  emit_native_build_map,
2343  emit_native_store_map,
2344  #if MICROPY_PY_BUILTINS_SET
2345  emit_native_build_set,
2346  #endif
2347  #if MICROPY_PY_BUILTINS_SLICE
2348  emit_native_build_slice,
2349  #endif
2350  emit_native_store_comp,
2351  emit_native_unpack_sequence,
2352  emit_native_unpack_ex,
2353  emit_native_make_function,
2354  emit_native_make_closure,
2355  emit_native_call_function,
2356  emit_native_call_method,
2357  emit_native_return_value,
2358  emit_native_raise_varargs,
2359  emit_native_yield_value,
2360  emit_native_yield_from,
2361 
2362  emit_native_start_except_handler,
2363  emit_native_end_except_handler,
2364 };
2365 
2366 #endif