// NOTE(review): this file is an elided extraction of MicroPython's py/emitnative.c.
// The integers prefixing each fragment are original source line numbers fused into
// the text; many intervening lines are missing. Fragments below: DEBUG_printf
// configuration, the per-architecture native-emitter selection #if, and entries of
// the runtime-function argument-count table (mp_fun_n_args).
52 #if MICROPY_DEBUG_VERBOSE // print debugging info 53 #define DEBUG_PRINT (1) 54 #define DEBUG_printf DEBUG_printf 55 #else // don't print debugging info 56 #define DEBUG_printf(...) (void)0 60 #if (MICROPY_EMIT_X64 && N_X64) \ 61 || (MICROPY_EMIT_X86 && N_X86) \ 62 || (MICROPY_EMIT_THUMB && N_THUMB) \ 63 || (MICROPY_EMIT_ARM && N_ARM) \ 64 || (MICROPY_EMIT_XTENSA && N_XTENSA) \ 67 #define GENERIC_ASM_API (1) 73 #define EXPORT_FUN(name) emit_native_x64_##name 100 #if MICROPY_PY_BUILTINS_SET 101 [MP_F_BUILD_SET] = 2,
// Table entries guarded by feature flags (set / slice support).
102 [MP_F_STORE_SET] = 2,
116 #if MICROPY_PY_BUILTINS_SLICE 117 [MP_F_NEW_SLICE] = 3,
// Per-architecture EXPORT_FUN name mangling (each #define was inside an #elif branch
// that is elided here), the ViperTypeError helper macro (its closing "} while (0)" is
// elided), and vtype_to_qstr: maps a viper vtype_kind_t to the qstr of its type name
// for error messages. The "switch (vtype) {" line (orig. 186) and closing braces are
// missing from this extraction.
131 #define EXPORT_FUN(name) emit_native_x86_##name 137 #define EXPORT_FUN(name) emit_native_thumb_##name 143 #define EXPORT_FUN(name) emit_native_arm_##name 149 #define EXPORT_FUN(name) emit_native_xtensa_##name 153 #error unknown native emitter 157 #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \ 158 *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \ 185 STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
187 case VTYPE_PYOBJ:
return MP_QSTR_object;
188 case VTYPE_BOOL:
return MP_QSTR_bool;
189 case VTYPE_INT:
return MP_QSTR_int;
190 case VTYPE_UINT:
return MP_QSTR_uint;
191 case VTYPE_PTR:
return MP_QSTR_ptr;
192 case VTYPE_PTR8:
return MP_QSTR_ptr8;
193 case VTYPE_PTR16:
return MP_QSTR_ptr16;
194 case VTYPE_PTR32:
return MP_QSTR_ptr32;
// VTYPE_PTR_NONE and any unknown vtype both report as 'None'.
195 case VTYPE_PTR_NONE:
default:
return MP_QSTR_None;
// Fragments of the emitter's data structures: stack_info_t describes one entry of
// the emulated Python value stack (kind = register / local value / immediate), and
// the emit_t fields below track per-function type state. Most fields are elided.
199 typedef struct _stack_info_t {
201 stack_info_kind_t kind;
// Declared return type of the function being compiled (viper mode).
214 vtype_kind_t return_vtype;
// Per-local-variable vtype array, allocated to local_vtype_alloc entries.
217 vtype_kind_t *local_vtype;
220 stack_info_t *stack_info;
// vtype saved by the jump helper so both branch arms agree on the popped type.
221 vtype_kind_t saved_stack_vtype;
224 int const_table_offset;
229 bool last_emit_was_return_value;
// Fragments of the emitter constructor (m_new0 of the assembler object) and
// destructor (m_del of the per-local vtype and stack-info arrays).
238 emit->error_slot = error_slot;
239 emit->as =
m_new0(ASM_T, 1);
247 m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
248 m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
// set-native-type handler: maps a type-name qstr (object/bool/int/uint/ptr/ptr8/
// ptr16/ptr32) to its vtype constant, then records it either as the function's
// return type (orig. 272) or as the declared type of argument arg1 (orig. 274-275).
// The enclosing "switch (arg2)" line and braces are elided from this extraction.
255 emit->do_viper_types = arg1;
261 case MP_QSTR_object: type = VTYPE_PYOBJ;
break;
262 case MP_QSTR_bool: type = VTYPE_BOOL;
break;
263 case MP_QSTR_int: type = VTYPE_INT;
break;
264 case MP_QSTR_uint: type = VTYPE_UINT;
break;
265 case MP_QSTR_ptr: type = VTYPE_PTR;
break;
266 case MP_QSTR_ptr8: type = VTYPE_PTR8;
break;
267 case MP_QSTR_ptr16: type = VTYPE_PTR16;
break;
// Unknown type name: raise ViperTypeError via the error-slot macro and bail out.
269 default: EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
"unknown type '%q'", arg2);
return;
272 emit->return_vtype = type;
274 assert(arg1 < emit->local_vtype_alloc);
275 emit->local_vtype[arg1] = type;
// Forward declarations for the stack-pop/push helpers defined later in the file.
282 STATIC void emit_pre_pop_reg(
emit_t *emit, vtype_kind_t *vtype,
int reg_dest);
283 STATIC void emit_post_push_reg(
emit_t *emit, vtype_kind_t vtype,
int reg);
// STATE_START: number of machine words occupied by mp_code_state_t at the base of
// the native function's state area. Below: fragments of emit_native_start_pass —
// resets per-pass stack bookkeeping, (re)allocates the local_vtype and stack_info
// arrays, seeds argument/local vtypes, then emits the function entry code for
// either viper mode (args in registers, orig. 385-392) or normal native mode
// (state area of STATE_START + n_state words, orig. 402-445).
287 #define STATE_START (sizeof(mp_code_state_t) / sizeof(mp_uint_t)) 290 DEBUG_printf(
"start_pass(pass=%u, scope=%p)\n", pass, scope);
293 emit->stack_start = 0;
294 emit->stack_size = 0;
295 emit->last_emit_was_return_value =
false;
// Grow the per-local vtype array if this scope has more locals than allocated.
299 if (emit->local_vtype_alloc < scope->
num_locals) {
300 emit->local_vtype =
m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->
num_locals);
307 if (emit->stack_info ==
NULL) {
// NOTE(review): stack_info is over-allocated by a fixed slack of 200 entries.
308 emit->stack_info_alloc = scope->
stack_size + 200;
309 emit->stack_info =
m_new(stack_info_t, emit->stack_info_alloc);
// Default return type is a Python object unless viper typing overrides it.
313 emit->return_vtype = VTYPE_PYOBJ;
316 mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
// Arguments start as PYOBJ; all other locals start UNBOUND until first store.
323 for (
mp_uint_t i = 0; i < num_args; i++) {
324 emit->local_vtype[i] = VTYPE_PYOBJ;
328 for (
mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
329 emit->local_vtype[i] = VTYPE_UNBOUND;
333 for (
mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
334 emit->stack_info[i].kind = STACK_VALUE;
335 emit->stack_info[i].vtype = VTYPE_UNBOUND;
342 if (emit->do_viper_types) {
// Viper entry path: limited to 4 register-passed arguments.
346 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
"Viper functions don't currently support more than 4 arguments");
// Locals beyond the register-allocated ones live on the C stack; stack_start
// is offset past them.
353 num_locals = scope->
num_locals - REG_LOCAL_NUM;
354 if (num_locals < 0) {
357 emit->stack_start = num_locals;
360 ASM_ENTRY(emit->as, num_locals);
// Move incoming argument registers into the locals' registers / stack slots.
385 ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
387 ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
389 ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
392 ASM_MOV_REG_TO_LOCAL(emit->as, REG_ARG_4, i - REG_LOCAL_NUM);
// Normal native entry path: reserve the full bytecode-style state area.
402 ASM_ENTRY(emit->as, STATE_START + emit->n_state);
428 ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, 0, REG_ARG_1);
// Cache the first three locals in registers (state is indexed from the top).
441 ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 0, REG_LOCAL_1);
443 ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 1, REG_LOCAL_2);
445 ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 2, REG_LOCAL_3);
454 emit->local_vtype[
id->local_num] = VTYPE_PYOBJ;
// Fragments of emit_native_end_pass: records the prelude and constant-table
// offsets in the generated code, scans scope id_info for argument names, finishes
// the assembler pass, and on the final pass packs the viper type signature
// (4 bits per argument plus 4 bits for the return type, orig. 527-529) before
// registering the compiled function with the runtime.
462 if (!emit->last_emit_was_return_value) {
466 if (!emit->do_viper_types) {
467 emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
477 #if MICROPY_PERSISTENT_CODE 488 for (
int i = 0; i < emit->scope->id_info_len; i++) {
489 id_info_t *
id = &emit->scope->id_info[i];
498 emit->const_table_offset = mp_asm_base_get_code_pos(&emit->as->base);
// Look up each positional/kw-only argument's name; default is '*'.
502 for (
int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
503 qstr qst = MP_QSTR__star_;
504 for (
int j = 0; j < emit->scope->id_info_len; ++j) {
505 id_info_t *
id = &emit->scope->id_info[j];
516 ASM_END_PASS(emit->as);
// All values must have been consumed by the end of the function body.
519 assert(emit->stack_size == 0);
522 void *f = mp_asm_base_get_code(&emit->as->base);
523 mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
// type_sig: nibble 0 = return vtype, nibble i+1 = vtype of argument i.
527 mp_uint_t type_sig = emit->return_vtype & 0xf;
528 for (
mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
529 type_sig |= (emit->local_vtype[i] & 0xf) << (i * 4 + 4);
535 emit->scope->num_pos_args, emit->scope->scope_flags, type_sig);
// Accessor: true if the last opcode emitted was a return (closing brace elided).
539 STATIC bool emit_native_last_emit_was_return_value(
emit_t *emit) {
540 return emit->last_emit_was_return_value;
// adjust_stack: changes the emulated stack depth by stack_size_delta and, on
// codegen passes, records the maximum depth into the scope. The debug loop dumps
// every live stack entry (vtype, kind, register).
545 emit->stack_size += stack_size_delta;
546 if (emit->pass >
MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
547 emit->scope->stack_size = emit->stack_size;
550 DEBUG_printf(
" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
551 for (
int i = 0; i < emit->stack_size; i++) {
552 stack_info_t *si = &emit->stack_info[i];
553 DEBUG_printf(
" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
// Fragments of emit_native_adjust_stack_size (marks newly exposed entries as
// plain stack values, using saved_stack_vtype for the first one), emit_native_pre
// (clears the return flag), and the peek_stack/peek_vtype accessors where depth 0
// is the top of stack.
566 for (
mp_int_t i = 0; i < delta; i++) {
567 stack_info_t *si = &emit->stack_info[emit->stack_size + i];
568 si->kind = STACK_VALUE;
572 si->vtype = emit->saved_stack_vtype;
574 si->vtype = VTYPE_PYOBJ;
577 adjust_stack(emit, delta);
587 emit->last_emit_was_return_value =
false;
// peek_stack: entry at `depth` below the top (0 = top).
592 return &emit->stack_info[emit->stack_size - 1 - depth];
597 return peek_stack(emit, depth)->vtype;
// need_reg_single: frees machine register reg_needed by spilling any stack entry
// currently cached in it to its stack-local slot, skipping the entry at
// skip_stack_pos (counted from the top of stack).
602 STATIC void need_reg_single(
emit_t *emit,
int reg_needed,
int skip_stack_pos) {
// Convert top-relative position to an absolute stack index.
603 skip_stack_pos = emit->stack_size - skip_stack_pos;
604 for (
int i = 0; i < emit->stack_size; i++) {
605 if (i != skip_stack_pos) {
606 stack_info_t *si = &emit->stack_info[i];
607 if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
608 si->kind = STACK_VALUE;
609 ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
// need_reg_all (fragment): spill every register-cached stack entry to its local
// slot so all machine registers are free.
616 for (
int i = 0; i < emit->stack_size; i++) {
617 stack_info_t *si = &emit->stack_info[i];
618 if (si->kind == STACK_REG) {
619 si->kind = STACK_VALUE;
620 ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
// need_stack_settled: materialise the whole emulated stack into memory — first
// spill register-cached entries, then write out immediate-valued entries (via
// REG_TEMP0) so every entry is a STACK_VALUE in its local slot.
626 DEBUG_printf(
" need_stack_settled; stack_size=%d\n", emit->stack_size);
627 for (
int i = 0; i < emit->stack_size; i++) {
628 stack_info_t *si = &emit->stack_info[i];
629 if (si->kind == STACK_REG) {
630 DEBUG_printf(
" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
631 si->kind = STACK_VALUE;
632 ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
// Second sweep: flush immediates to memory.
635 for (
int i = 0; i < emit->stack_size; i++) {
636 stack_info_t *si = &emit->stack_info[i];
637 if (si->kind == STACK_IMM) {
639 si->kind = STACK_VALUE;
640 ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + i, REG_TEMP0);
// emit_access_stack: load the stack entry at position `pos` (1 = top) into
// reg_dest without popping it; handles all three entry kinds (value in a local
// slot, value cached in a register, immediate). Switch framing is elided here.
646 STATIC void emit_access_stack(
emit_t *emit,
int pos, vtype_kind_t *vtype,
int reg_dest) {
647 need_reg_single(emit, reg_dest, pos);
648 stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
652 ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - pos, reg_dest);
// Already in a register: move only if it's not the requested one.
656 if (si->data.u_reg != reg_dest) {
657 ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
662 ASM_MOV_IMM_TO_REG(emit->as, si->data.u_imm, reg_dest);
// emit_fold_stack_top: remove the second-from-top entry, moving the top value
// into reg_dest and shrinking the stack by one.
669 STATIC void emit_fold_stack_top(
emit_t *emit,
int reg_dest) {
670 stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
672 if (si->kind == STACK_VALUE) {
674 ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - 1, reg_dest);
675 si->kind = STACK_REG;
676 si->data.u_reg = reg_dest;
678 adjust_stack(emit, -1);
// emit_pre_pop_reg_flexible: pop the top of stack into *reg_dest, but if the
// value is already cached in a register (other than not_r1/not_r2) reuse that
// register instead of moving, and report it back through *reg_dest.
683 STATIC void emit_pre_pop_reg_flexible(
emit_t *emit, vtype_kind_t *vtype,
int *reg_dest,
int not_r1,
int not_r2) {
684 emit->last_emit_was_return_value =
false;
685 stack_info_t *si = peek_stack(emit, 0);
686 if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
688 *reg_dest = si->data.u_reg;
689 need_reg_single(emit, *reg_dest, 1);
691 emit_access_stack(emit, 1, vtype, *reg_dest);
693 adjust_stack(emit, -1);
// emit_pre_pop_discard (fragment): drop the top of stack without reading it.
697 emit->last_emit_was_return_value =
false;
698 adjust_stack(emit, -1);
// emit_pre_pop_reg: pop the top of stack into reg_dest, returning its vtype
// through *vtype (closing brace elided from this extraction).
701 STATIC void emit_pre_pop_reg(
emit_t *emit, vtype_kind_t *vtype,
int reg_dest) {
702 emit->last_emit_was_return_value =
false;
703 emit_access_stack(emit, 1, vtype, reg_dest);
704 adjust_stack(emit, -1);
// Convenience poppers: pop two / three values into the given registers.
// Note the order — the 'a' register receives the top of stack first.
707 STATIC void emit_pre_pop_reg_reg(
emit_t *emit, vtype_kind_t *vtypea,
int rega, vtype_kind_t *vtypeb,
int regb) {
708 emit_pre_pop_reg(emit, vtypea, rega);
709 emit_pre_pop_reg(emit, vtypeb, regb);
712 STATIC void emit_pre_pop_reg_reg_reg(
emit_t *emit, vtype_kind_t *vtypea,
int rega, vtype_kind_t *vtypeb,
int regb, vtype_kind_t *vtypec,
int regc) {
713 emit_pre_pop_reg(emit, vtypea, rega);
714 emit_pre_pop_reg(emit, vtypeb, regb);
715 emit_pre_pop_reg(emit, vtypec, regc);
// emit_post_top_set_vtype: retag the top-of-stack entry's vtype in place.
// emit_post_push_reg / emit_post_push_imm: push a new entry whose value lives in
// a register or is an immediate, then grow the stack.
722 STATIC void emit_post_top_set_vtype(
emit_t *emit, vtype_kind_t new_vtype) {
723 stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
724 si->vtype = new_vtype;
727 STATIC void emit_post_push_reg(
emit_t *emit, vtype_kind_t vtype,
int reg) {
728 stack_info_t *si = &emit->stack_info[emit->stack_size];
730 si->kind = STACK_REG;
731 si->data.u_reg = reg;
732 adjust_stack(emit, 1);
// emit_post_push_imm fragment (signature elided).
736 stack_info_t *si = &emit->stack_info[emit->stack_size];
738 si->kind = STACK_IMM;
739 si->data.u_imm = imm;
740 adjust_stack(emit, 1);
// Convenience pushers for two, three and four register values; pushed in order,
// so the last ('b'/'c'/'d') register ends up nearest the top of stack.
743 STATIC void emit_post_push_reg_reg(
emit_t *emit, vtype_kind_t vtypea,
int rega, vtype_kind_t vtypeb,
int regb) {
744 emit_post_push_reg(emit, vtypea, rega);
745 emit_post_push_reg(emit, vtypeb, regb);
748 STATIC void emit_post_push_reg_reg_reg(
emit_t *emit, vtype_kind_t vtypea,
int rega, vtype_kind_t vtypeb,
int regb, vtype_kind_t vtypec,
int regc) {
749 emit_post_push_reg(emit, vtypea, rega);
750 emit_post_push_reg(emit, vtypeb, regb);
751 emit_post_push_reg(emit, vtypec, regc);
754 STATIC void emit_post_push_reg_reg_reg_reg(
emit_t *emit, vtype_kind_t vtypea,
int rega, vtype_kind_t vtypeb,
int regb, vtype_kind_t vtypec,
int regc, vtype_kind_t vtyped,
int regd) {
755 emit_post_push_reg(emit, vtypea, rega);
756 emit_post_push_reg(emit, vtypeb, regb);
757 emit_post_push_reg(emit, vtypec, regc);
758 emit_post_push_reg(emit, vtyped, regd);
// Fragments of emit_call / emit_call_with_imm_arg(s): load immediate argument(s)
// into registers, then indirect-call the runtime helper selected by fun_kind via
// the mp_fun_table dispatch table. ALIGNED variant used for qstr/pointer values.
763 ASM_CALL_IND(emit->as,
mp_fun_table[fun_kind], fun_kind);
768 ASM_MOV_IMM_TO_REG(emit->as, arg_val, arg_reg);
769 ASM_CALL_IND(emit->as,
mp_fun_table[fun_kind], fun_kind);
775 ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val, arg_reg);
776 ASM_CALL_IND(emit->as,
mp_fun_table[fun_kind], fun_kind);
781 ASM_MOV_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
782 ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
783 ASM_CALL_IND(emit->as,
mp_fun_table[fun_kind], fun_kind);
789 ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
790 ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
791 ASM_MOV_IMM_TO_REG(emit->as, arg_val3, arg_reg3);
792 ASM_CALL_IND(emit->as,
mp_fun_table[fun_kind], fun_kind);
// Fragments that settle the top n_pop stack entries into memory as boxed Python
// objects before handing the runtime a pointer to them (presumably
// emit_get_stack_pointer_to_reg_for_pop / ..._for_push — the signatures are
// elided, so TODO confirm). Immediates are written out directly; bools become
// mp_const_false/true, ints become small-int objects.
804 stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
807 if (si->kind == STACK_IMM) {
808 si->kind = STACK_VALUE;
811 ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
// Boolean immediate: store the canonical False/True object pointer.
814 if (si->data.u_imm == 0) {
815 ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (
mp_uint_t)
mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
817 ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (
mp_uint_t)
mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
819 si->vtype = VTYPE_PYOBJ;
// Integer immediate: box as a small int object.
823 ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (
uintptr_t)
MP_OBJ_NEW_SMALL_INT(si->data.u_imm), emit->stack_start + emit->stack_size - 1 - i, reg_dest);
824 si->vtype = VTYPE_PYOBJ;
833 assert(si->kind == STACK_VALUE);
// Non-PYOBJ values are converted via a runtime call, result stored back.
838 stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
839 if (si->vtype != VTYPE_PYOBJ) {
840 mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
841 ASM_MOV_LOCAL_TO_REG(emit->as, local_num, REG_ARG_1);
843 ASM_MOV_REG_TO_LOCAL(emit->as, REG_RET, local_num);
844 si->vtype = VTYPE_PYOBJ;
850 adjust_stack(emit, -n_pop);
851 ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
// Push variant: reserve n_push PYOBJ slots and return their base address.
858 emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
859 emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
861 ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
862 adjust_stack(emit, n_push);
// Fragments of the import-related opcode handlers (import name / import from /
// import star — exact function boundaries elided). Viper mode special-cases an
// immediate level and a None fromlist; otherwise operands are popped as PYOBJ.
867 emit_native_pre(emit);
869 need_stack_settled(emit);
879 if (emit->do_viper_types) {
881 stack_info_t *top = peek_stack(emit, 0);
882 if (top->vtype == VTYPE_PTR_NONE) {
883 emit_pre_pop_discard(emit);
886 vtype_kind_t vtype_fromlist;
887 emit_pre_pop_reg(emit, &vtype_fromlist, REG_ARG_2);
888 assert(vtype_fromlist == VTYPE_PYOBJ);
892 top = peek_stack(emit, 0);
893 assert(top->vtype == VTYPE_INT && top->kind == STACK_IMM);
895 emit_pre_pop_discard(emit);
898 vtype_kind_t vtype_fromlist;
899 vtype_kind_t vtype_level;
900 emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
901 assert(vtype_fromlist == VTYPE_PYOBJ);
902 assert(vtype_level == VTYPE_PYOBJ);
906 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// import-from: reads the module (left on the stack) and pushes the attribute.
911 emit_native_pre(emit);
912 vtype_kind_t vtype_module;
913 emit_access_stack(emit, 1, &vtype_module, REG_ARG_1);
914 assert(vtype_module == VTYPE_PYOBJ);
916 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// import-star: consumes the module object.
921 vtype_kind_t vtype_module;
922 emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1);
923 assert(vtype_module == VTYPE_PYOBJ);
// Fragments of the load-const handlers (token/small-int/string/object/null).
// Viper mode pushes raw immediates; otherwise boxed objects are pushed.
930 emit_native_pre(emit);
933 if (emit->do_viper_types) {
953 emit_post_push_imm(emit, vtype, val);
958 emit_native_pre(emit);
959 if (emit->do_viper_types) {
960 emit_post_push_imm(emit, VTYPE_INT, arg);
967 emit_native_pre(emit);
// load-const-obj: materialise the object pointer into REG_RET and push it.
982 emit_native_pre(emit);
983 need_reg_single(emit, REG_RET, 0);
984 ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, (
mp_uint_t)obj, REG_RET);
985 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// load-null: a 0 pushed with PYOBJ type.
989 emit_native_pre(emit);
990 emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
// emit_native_load_fast (fragments): push local `local_num`; the first three
// locals live in registers, the rest in stack slots (viper) or the state area
// (normal native). Using a local before its type is known is a viper error.
// Below it, load_deref fragments dereference a closure cell.
995 vtype_kind_t vtype = emit->local_vtype[local_num];
996 if (vtype == VTYPE_UNBOUND) {
997 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
"local '%q' used before type known", qst);
999 emit_native_pre(emit);
1000 if (local_num == 0) {
1001 emit_post_push_reg(emit, vtype, REG_LOCAL_1);
1002 }
else if (local_num == 1) {
1003 emit_post_push_reg(emit, vtype, REG_LOCAL_2);
1004 }
else if (local_num == 2) {
1005 emit_post_push_reg(emit, vtype, REG_LOCAL_3);
1007 need_reg_single(emit, REG_TEMP0, 0);
1008 if (emit->do_viper_types) {
1009 ASM_MOV_LOCAL_TO_REG(emit->as, local_num - REG_LOCAL_NUM, REG_TEMP0);
1011 ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - local_num, REG_TEMP0);
1013 emit_post_push_reg(emit, vtype, REG_TEMP0);
// load_deref: load the cell via load_fast then fetch its contents at offset 1.
// NOTE(review): '®_base' below is mojibake for '&reg_base' (preserved as found).
1019 need_reg_single(emit, REG_RET, 0);
1020 emit_native_load_fast(emit, qst, local_num);
1022 int reg_base = REG_RET;
1023 emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1024 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1026 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// Fragments of load_name / load_global / load_attr. In viper mode, loading the
// builtin names int/uint/ptr/ptr8/ptr16/ptr32 pushes a VTYPE_BUILTIN_CAST
// immediate instead of performing a runtime lookup.
1031 emit_native_pre(emit);
1033 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1038 emit_native_pre(emit);
1040 if (emit->do_viper_types && qst == MP_QSTR_int) {
1041 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_INT);
1042 }
else if (emit->do_viper_types && qst == MP_QSTR_uint) {
1043 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_UINT);
1044 }
else if (emit->do_viper_types && qst == MP_QSTR_ptr) {
1045 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR);
1046 }
else if (emit->do_viper_types && qst == MP_QSTR_ptr8) {
1047 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR8);
1048 }
else if (emit->do_viper_types && qst == MP_QSTR_ptr16) {
1049 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR16);
1050 }
else if (emit->do_viper_types && qst == MP_QSTR_ptr32) {
1051 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, VTYPE_PTR32);
1054 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// load_attr: pop the base object, call the runtime, push the attribute.
1063 vtype_kind_t vtype_base;
1064 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1065 assert(vtype_base == VTYPE_PYOBJ);
1067 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// emit_native_load_method (fragment): super lookup consumes 3 stack slots and
// produces 2; the normal path pops the base object and pushes a 2-slot
// method+self pair via the runtime. Followed by load_build_class.
1070 STATIC void emit_native_load_method(
emit_t *emit,
qstr qst,
bool is_super) {
1072 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3);
1073 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2);
1076 vtype_kind_t vtype_base;
1077 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1078 assert(vtype_base == VTYPE_PYOBJ);
1079 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2);
1084 STATIC void emit_native_load_build_class(
emit_t *emit) {
1085 emit_native_pre(emit);
1087 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// Fragments of emit_native_load_subscr. PYOBJ bases go through the runtime;
// viper pointer bases (ptr8/ptr16/ptr32) generate direct loads, with a fast
// path for constant indices (Thumb ldrb/ldrh/ldr with a 5-bit immediate when
// 0 < index < 32) and a scaled-add path for register indices.
// NOTE(review): '®_base' / '®_index' below are mojibake for '&reg_base' /
// '&reg_index' (preserved as found in this extraction).
1096 vtype_kind_t vtype_base = peek_vtype(emit, 1);
1098 if (vtype_base == VTYPE_PYOBJ) {
1101 vtype_kind_t vtype_index = peek_vtype(emit, 0);
1102 if (vtype_index == VTYPE_PYOBJ) {
1103 emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1105 emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1107 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1109 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1111 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// Viper path, constant index known at compile time.
1117 stack_info_t *top = peek_stack(emit, 0);
1118 if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1120 mp_int_t index_value = top->data.u_imm;
1121 emit_pre_pop_discard(emit);
1122 int reg_base = REG_ARG_1;
1123 int reg_index = REG_ARG_2;
1124 emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_index);
1125 switch (vtype_base) {
// ptr8: byte load; immediate-offset form when it fits in 5 bits (Thumb).
1129 if (index_value != 0) {
1132 if (index_value > 0 && index_value < 32) {
1133 asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1137 ASM_MOV_IMM_TO_REG(emit->as, index_value, reg_index);
1138 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1139 reg_base = reg_index;
1141 ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base);
// ptr16: halfword load, index scaled by 2.
1146 if (index_value != 0) {
1149 if (index_value > 0 && index_value < 32) {
1150 asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1154 ASM_MOV_IMM_TO_REG(emit->as, index_value << 1, reg_index);
1155 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1156 reg_base = reg_index;
1158 ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base);
// ptr32: word load, index scaled by 4.
1163 if (index_value != 0) {
1166 if (index_value > 0 && index_value < 32) {
1167 asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1171 ASM_MOV_IMM_TO_REG(emit->as, index_value << 2, reg_index);
1172 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1173 reg_base = reg_index;
1175 ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base);
1179 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1180 "can't load from '%q'", vtype_to_qstr(vtype_base));
// Viper path, index in a register: scale by repeated adds, then load.
1184 vtype_kind_t vtype_index;
1185 int reg_index = REG_ARG_2;
1186 emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, REG_ARG_1);
1187 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1188 if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1189 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1190 "can't load with '%q' index", vtype_to_qstr(vtype_index));
1192 switch (vtype_base) {
1196 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1197 ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1);
1202 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1203 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1204 ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1);
1209 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1210 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1211 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1212 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1213 ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1);
1217 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1218 "can't load from '%q'", vtype_to_qstr(vtype_base));
1221 emit_post_push_reg(emit, VTYPE_INT, REG_RET);
// emit_native_store_fast (fragments): store top of stack into local `local_num`
// (register-resident for the first three, otherwise a stack/state slot). In
// viper mode the local's type is inferred on first store and must stay
// consistent afterwards. Followed by store_deref fragments (write into a cell).
// NOTE(review): '®_base' / '®_src' below are mojibake for '&reg_base' /
// '&reg_src' (preserved as found).
1227 if (local_num == 0) {
1228 emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
1229 }
else if (local_num == 1) {
1230 emit_pre_pop_reg(emit, &vtype, REG_LOCAL_2);
1231 }
else if (local_num == 2) {
1232 emit_pre_pop_reg(emit, &vtype, REG_LOCAL_3);
1234 emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1235 if (emit->do_viper_types) {
1236 ASM_MOV_REG_TO_LOCAL(emit->as, REG_TEMP0, local_num - REG_LOCAL_NUM);
1238 ASM_MOV_REG_TO_LOCAL(emit->as, REG_TEMP0, STATE_START + emit->n_state - 1 - local_num);
// First store fixes the local's viper type; later stores must match.
1244 if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1246 emit->local_vtype[local_num] = vtype;
1247 }
else if (emit->local_vtype[local_num] != vtype) {
1249 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1250 "local '%q' has type '%q' but source is '%q'",
1251 qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
// store_deref: load the cell object, then store the value at offset 1.
1257 need_reg_single(emit, REG_TEMP0, 0);
1258 need_reg_single(emit, REG_TEMP1, 0);
1259 emit_native_load_fast(emit, qst, local_num);
1261 int reg_base = REG_TEMP0;
1262 emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1263 int reg_src = REG_TEMP1;
1264 emit_pre_pop_reg_flexible(emit, &vtype, ®_src, reg_base, reg_base);
1265 ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
// Fragments of store_name / store_global / store_attr: operands are popped as
// PYOBJ (with a conversion path when the value isn't already boxed) and handed
// to the runtime store helpers.
1272 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1273 assert(vtype == VTYPE_PYOBJ);
1279 vtype_kind_t vtype = peek_vtype(emit, 0);
1280 if (vtype == VTYPE_PYOBJ) {
1281 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1283 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1285 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
// store_attr: base and value both must be PYOBJ.
1292 vtype_kind_t vtype_base, vtype_val;
1293 emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3);
1294 assert(vtype_base == VTYPE_PYOBJ);
1295 assert(vtype_val == VTYPE_PYOBJ);
// Fragments of emit_native_store_subscr — mirror of load_subscr for stores.
// PYOBJ bases go through the runtime (settling 3 stack slots first if any
// operand isn't boxed); viper pointer bases generate direct byte/halfword/word
// stores, with a Thumb immediate-offset fast path for constant indices in
// [1,31] and a scaled-add path for register indices.
// NOTE(review): '®_base' / '®_value' / '®_index' below are mojibake for
// '&reg_base' / '&reg_value' / '&reg_index' (preserved as found).
1306 vtype_kind_t vtype_base = peek_vtype(emit, 1);
1308 if (vtype_base == VTYPE_PYOBJ) {
1310 vtype_kind_t vtype_index = peek_vtype(emit, 0);
1311 vtype_kind_t vtype_value = peek_vtype(emit, 2);
1312 if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
// Box all three operands in memory, then re-pop them into call registers.
1315 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1316 adjust_stack(emit, 3);
1318 emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
// Viper path, constant index.
1325 stack_info_t *top = peek_stack(emit, 0);
1326 if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1328 mp_int_t index_value = top->data.u_imm;
1329 emit_pre_pop_discard(emit);
1330 vtype_kind_t vtype_value;
1331 int reg_base = REG_ARG_1;
1332 int reg_index = REG_ARG_2;
1333 int reg_value = REG_ARG_3;
1334 emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_value);
1337 emit_pre_pop_reg(emit, &vtype_value, reg_value);
1339 emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, reg_base, reg_index);
// Only machine-word values (bool/int/uint) may be stored through a pointer.
1341 if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1342 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1343 "can't store '%q'", vtype_to_qstr(vtype_value));
1345 switch (vtype_base) {
// ptr8: byte store.
1349 if (index_value != 0) {
1352 if (index_value > 0 && index_value < 32) {
1353 asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1357 ASM_MOV_IMM_TO_REG(emit->as, index_value, reg_index);
1362 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1363 reg_base = reg_index;
1365 ASM_STORE8_REG_REG(emit->as, reg_value, reg_base);
// ptr16: halfword store, index scaled by 2.
1370 if (index_value != 0) {
1373 if (index_value > 0 && index_value < 32) {
1374 asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1378 ASM_MOV_IMM_TO_REG(emit->as, index_value << 1, reg_index);
1383 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1384 reg_base = reg_index;
1386 ASM_STORE16_REG_REG(emit->as, reg_value, reg_base);
// ptr32: word store, index scaled by 4.
1391 if (index_value != 0) {
1394 if (index_value > 0 && index_value < 32) {
1395 asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1399 ASM_MOV_IMM_TO_REG(emit->as, index_value << 2, reg_index);
1404 ASM_ADD_REG_REG(emit->as, reg_index, reg_base);
1405 reg_base = reg_index;
1407 ASM_STORE32_REG_REG(emit->as, reg_value, reg_base);
1411 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1412 "can't store to '%q'", vtype_to_qstr(vtype_base));
// Viper path, register index: scale by repeated adds, then store.
1416 vtype_kind_t vtype_index, vtype_value;
1417 int reg_index = REG_ARG_2;
1418 int reg_value = REG_ARG_3;
1419 emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, reg_value);
1420 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1421 if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1422 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1423 "can't store with '%q' index", vtype_to_qstr(vtype_index));
1427 emit_pre_pop_reg(emit, &vtype_value, reg_value);
1429 emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, REG_ARG_1, reg_index);
1431 if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1432 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1433 "can't store '%q'", vtype_to_qstr(vtype_value));
1435 switch (vtype_base) {
1443 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1444 ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1);
1453 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1454 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1455 ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1);
1464 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1465 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1466 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1467 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1468 ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1);
1472 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1473 "can't store to '%q'", vtype_to_qstr(vtype_base));
// Fragments of the delete handlers: delete_fast is implemented via store_fast
// (presumably storing a null sentinel — surrounding code elided; TODO confirm),
// followed by delete_name/global, delete_attr and delete_subscr.
1485 emit_native_store_fast(emit, qst, local_num);
1496 emit_native_pre(emit);
1502 emit_native_pre(emit);
1508 vtype_kind_t vtype_base;
1509 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1510 assert(vtype_base == VTYPE_PYOBJ);
// delete_subscr: pops index then base, both must be PYOBJ.
1516 vtype_kind_t vtype_index, vtype_base;
1517 emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1);
1518 assert(vtype_index == VTYPE_PYOBJ);
1519 assert(vtype_base == VTYPE_PYOBJ);
// Stack-shuffle opcodes: dup_top (pop once, push twice), dup_top_two (pop two,
// push the pair twice), pop_top (discard), rot_two and rot_three (reorder via
// temp registers). NOTE(review): '®' below is mojibake for '&reg'.
1526 int reg = REG_TEMP0;
1527 emit_pre_pop_reg_flexible(emit, &vtype, ®, -1, -1);
1528 emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
1532 vtype_kind_t vtype0, vtype1;
1533 emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1534 emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
1539 emit_pre_pop_discard(emit);
// rot_two: swap the top two entries.
1545 vtype_kind_t vtype0, vtype1;
1546 emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1547 emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
// rot_three: cycle the top three entries.
1552 vtype_kind_t vtype0, vtype1, vtype2;
1553 emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
1554 emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
// Unconditional jump (stack settled first so both control-flow edges agree),
// then emit_native_jump_helper: pops/peeks the condition value — a PYOBJ is
// converted via a runtime truthiness call, viper word types (bool/int/uint)
// are tested directly, anything else is a ViperTypeError. saved_stack_vtype
// records the peeked type for the non-pop variant. The pop_jump_if and
// jump_if handlers below branch on REG_RET zero/non-zero.
1559 emit_native_pre(emit);
1561 need_stack_settled(emit);
1562 ASM_JUMP(emit->as, label);
1566 STATIC void emit_native_jump_helper(
emit_t *emit,
bool pop) {
1567 vtype_kind_t vtype = peek_vtype(emit, 0);
1568 if (vtype == VTYPE_PYOBJ) {
1569 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1571 adjust_stack(emit, 1);
1575 emit_pre_pop_reg(emit, &vtype, REG_RET);
1577 adjust_stack(emit, 1);
1579 if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
1580 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1581 "can't implicitly convert '%q' to 'bool'", vtype_to_qstr(vtype));
1587 emit->saved_stack_vtype = vtype;
1590 need_stack_settled(emit);
// pop_jump_if: condition consumed.
1595 emit_native_jump_helper(emit,
true);
1597 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1599 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label);
// jump_if_or_pop: condition left on the stack on the taken edge.
1606 emit_native_jump_helper(emit,
false);
1608 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1610 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label);
1612 adjust_stack(emit, -1);
// Fragments of the with-statement and unwind handlers: setup_with loads the
// __exit__ and __enter__ methods (2-slot method pairs), installs an nlr_buf_t
// sized exception frame on the emulated stack, and the with_cleanup /
// unwind_jump fragments below shuffle the saved exit method and exception
// triple around runtime calls. Function boundaries are elided; exact handler
// attribution is approximate.
1623 emit_native_jump(emit, label);
1632 emit_access_stack(emit, 1, &vtype, REG_ARG_1);
1633 assert(vtype == VTYPE_PYOBJ);
1634 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2);
1635 emit_call_with_imm_arg(emit,
MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
// Reorder: keep the context manager below the bound __exit__ pair.
1638 emit_pre_pop_reg(emit, &vtype, REG_ARG_3);
1639 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1640 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1641 emit_post_push_reg(emit, vtype, REG_ARG_2);
1642 emit_post_push_reg(emit, vtype, REG_ARG_3);
1647 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2);
1648 emit_call_with_imm_arg(emit,
MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2);
1652 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2);
1654 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// Reserve an nlr_buf_t worth of machine words on the stack for nlr_push.
1658 need_stack_settled(emit);
1659 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1,
sizeof(
nlr_buf_t) /
sizeof(
mp_uint_t));
1661 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
1664 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1672 emit_native_pre(emit);
1681 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
1685 adjust_stack(emit, 1);
1687 emit_native_jump(emit, label + 1);
1690 emit_native_label_assign(emit, label);
1693 adjust_stack(emit, 3);
1697 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1698 adjust_stack(emit, -2);
// Rotate the exception triple into call order around the __exit__ invocation.
1702 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1703 emit_pre_pop_reg(emit, &vtype, REG_ARG_3);
1704 adjust_stack(emit, 1);
1705 emit_post_push_reg(emit, vtype, REG_ARG_1);
1706 emit_post_push_reg(emit, vtype, REG_ARG_3);
1707 emit_post_push_reg(emit, vtype, REG_ARG_2);
1711 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0);
1712 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2);
1713 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1);
1718 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
1723 if (REG_ARG_1 != REG_RET) {
1724 ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
1727 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label + 1);
1730 emit_pre_pop_discard(emit);
1734 emit_native_label_assign(emit, label + 1);
// Fragments of setup_except (push an nlr_buf_t-sized frame, branch to the
// handler when nlr_push returns non-zero), setup_finally (delegates to
// setup_except), end_finally, get_iter, for_iter (compares REG_RET against the
// stop sentinel in REG_TEMP1 to exit the loop), and the unary-op handler which
// requires a PYOBJ operand in viper mode.
1738 emit_native_pre(emit);
1740 need_stack_settled(emit);
1741 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1,
sizeof(
nlr_buf_t) /
sizeof(
mp_uint_t));
1743 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label);
// setup_finally is the same mechanism as setup_except.
1748 emit_native_setup_except(emit, label);
1758 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1759 emit_pre_pop_discard(emit);
1764 STATIC void emit_native_get_iter(
emit_t *emit,
bool use_stack) {
1769 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1770 assert(vtype == VTYPE_PYOBJ);
1776 ASM_MOV_IMM_TO_REG(emit->as, 0, REG_ARG_2);
1778 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
// for_iter: jump out of the loop when the iterator signals exhaustion.
1783 emit_native_pre(emit);
1788 ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
1789 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1794 emit_native_pre(emit);
1800 emit_native_pre(emit);
// unary op: only PYOBJ operands supported; otherwise ViperTypeError.
1812 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1813 if (vtype == VTYPE_PYOBJ) {
1815 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1817 adjust_stack(emit, 1);
1818 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1825 vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
1826 vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
1827 if (vtype_lhs == VTYPE_INT && vtype_rhs == VTYPE_INT) {
1837 emit_pre_pop_reg_reg(emit, &vtype_rhs,
ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
1839 emit_pre_pop_reg_reg(emit, &vtype_rhs,
ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
1842 ASM_LSL_REG(emit->as, REG_RET);
1844 ASM_ASR_REG(emit->as, REG_RET);
1846 emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1853 emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
1859 emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1863 int reg_rhs = REG_ARG_3;
1864 emit_pre_pop_reg_flexible(emit, &vtype_rhs, ®_rhs, REG_RET, REG_ARG_2);
1865 emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
1868 #if !(N_X64 || N_X86) 1870 ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1871 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1873 ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1874 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1877 ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1878 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1880 ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1881 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1883 ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1884 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1886 ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1887 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1889 ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1890 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1892 ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
1893 emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
1902 need_reg_single(emit, REG_RET, 0);
1906 static byte ops[6] = {
1918 static byte ops[6] = {
1928 asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
1937 static byte ret[6] = { 0, 1, 1, 0, 1, 0, };
1943 static uint ccs[6] = {
1962 if ((cc & 0x80) == 0) {
1968 #error not implemented 1970 emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
1973 adjust_stack(emit, 1);
1974 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1977 }
else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
1978 emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
1979 bool invert =
false;
1989 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1992 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1994 adjust_stack(emit, -1);
1995 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1996 "can't do binary op between '%q' and '%q'",
1997 vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
2004 emit_native_pre(emit);
2005 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args);
2007 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2011 emit_native_pre(emit);
2012 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args);
2014 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2018 emit_native_pre(emit);
2020 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2024 vtype_kind_t vtype_key, vtype_value, vtype_map;
2025 emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1);
2026 assert(vtype_key == VTYPE_PYOBJ);
2027 assert(vtype_value == VTYPE_PYOBJ);
2028 assert(vtype_map == VTYPE_PYOBJ);
2030 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2033 #if MICROPY_PY_BUILTINS_SET 2035 emit_native_pre(emit);
2036 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args);
2037 emit_call_with_imm_arg(emit, MP_F_BUILD_SET, n_args, REG_ARG_1);
2038 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2042 #if MICROPY_PY_BUILTINS_SLICE 2046 vtype_kind_t vtype_start, vtype_stop;
2047 emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1);
2048 assert(vtype_start == VTYPE_PYOBJ);
2049 assert(vtype_stop == VTYPE_PYOBJ);
2051 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2054 vtype_kind_t vtype_start, vtype_stop, vtype_step;
2055 emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1);
2056 assert(vtype_start == VTYPE_PYOBJ);
2057 assert(vtype_stop == VTYPE_PYOBJ);
2058 assert(vtype_step == VTYPE_PYOBJ);
2059 emit_call(emit, MP_F_NEW_SLICE);
2060 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2068 vtype_kind_t vtype_item;
2069 emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2070 assert(vtype_item == VTYPE_PYOBJ);
2072 #if MICROPY_PY_BUILTINS_SET 2074 vtype_kind_t vtype_item;
2075 emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2076 assert(vtype_item == VTYPE_PYOBJ);
2081 vtype_kind_t vtype_key, vtype_value;
2082 emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2083 assert(vtype_key == VTYPE_PYOBJ);
2084 assert(vtype_value == VTYPE_PYOBJ);
2087 vtype_kind_t vtype_collection;
2088 emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2089 assert(vtype_collection == VTYPE_PYOBJ);
2096 vtype_kind_t vtype_base;
2097 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
2098 assert(vtype_base == VTYPE_PYOBJ);
2099 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args);
2105 vtype_kind_t vtype_base;
2106 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
2107 assert(vtype_base == VTYPE_PYOBJ);
2108 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1);
2109 emit_call_with_imm_arg(emit,
MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2);
2114 emit_native_pre(emit);
2115 if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2118 vtype_kind_t vtype_def_tuple, vtype_def_dict;
2119 emit_pre_pop_reg_reg(emit, &vtype_def_dict, REG_ARG_3, &vtype_def_tuple, REG_ARG_2);
2120 assert(vtype_def_tuple == VTYPE_PYOBJ);
2121 assert(vtype_def_dict == VTYPE_PYOBJ);
2124 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2128 emit_native_pre(emit);
2129 if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2130 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2131 ASM_MOV_IMM_TO_REG(emit->as, n_closed_over, REG_ARG_2);
2133 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over + 2);
2134 ASM_MOV_IMM_TO_REG(emit->as, 0x100 | n_closed_over, REG_ARG_2);
2138 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2147 emit_native_pre(emit);
2148 vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2149 if (vtype_fun == VTYPE_BUILTIN_CAST) {
2151 assert(n_positional == 1 && n_keyword == 0);
2154 vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
2155 switch (peek_vtype(emit, 0)) {
2158 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2159 emit_pre_pop_discard(emit);
2161 emit_post_push_reg(emit, vtype_cast, REG_RET);
2171 case VTYPE_PTR_NONE:
2172 emit_fold_stack_top(emit, REG_ARG_1);
2173 emit_post_top_set_vtype(emit, vtype_cast);
2180 assert(vtype_fun == VTYPE_PYOBJ);
2182 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3);
2184 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2186 if (n_positional != 0 || n_keyword != 0) {
2187 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword);
2189 emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1);
2191 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2198 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 4);
2200 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2202 emit_native_pre(emit);
2203 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword);
2204 emit_call_with_2_imm_args(emit,
MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2205 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2211 if (emit->do_viper_types) {
2212 if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2213 emit_pre_pop_discard(emit);
2214 if (emit->return_vtype == VTYPE_PYOBJ) {
2217 ASM_MOV_IMM_TO_REG(emit->as, 0, REG_RET);
2221 emit_pre_pop_reg(emit, &vtype, REG_RET);
2222 if (vtype != emit->return_vtype) {
2223 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2224 "return expected '%q' but got '%q'",
2225 vtype_to_qstr(emit->return_vtype), vtype_to_qstr(vtype));
2230 emit_pre_pop_reg(emit, &vtype, REG_RET);
2231 assert(vtype == VTYPE_PYOBJ);
2233 emit->last_emit_was_return_value =
true;
2239 vtype_kind_t vtype_exc;
2240 emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1);
2241 if (vtype_exc != VTYPE_PYOBJ) {
2242 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
"must raise an object");
2259 STATIC void emit_native_start_except_handler(
emit_t *emit) {
2263 adjust_stack(emit, 1);
2264 vtype_kind_t vtype_nlr;
2265 emit_pre_pop_reg(emit, &vtype_nlr, REG_ARG_1);
2266 emit_pre_pop_discard(emit);
2267 emit_post_push_reg_reg_reg(emit, VTYPE_PYOBJ, REG_ARG_1, VTYPE_PYOBJ, REG_ARG_1, VTYPE_PYOBJ, REG_ARG_1);
2270 STATIC void emit_native_end_except_handler(
emit_t *emit) {
2271 adjust_stack(emit, -1);
2275 emit_native_set_native_type,
2276 emit_native_start_pass,
2277 emit_native_end_pass,
2278 emit_native_last_emit_was_return_value,
2279 emit_native_adjust_stack_size,
2280 emit_native_set_source_line,
2283 emit_native_load_fast,
2284 emit_native_load_deref,
2285 emit_native_load_name,
2286 emit_native_load_global,
2289 emit_native_store_fast,
2290 emit_native_store_deref,
2291 emit_native_store_name,
2292 emit_native_store_global,
2295 emit_native_delete_fast,
2296 emit_native_delete_deref,
2297 emit_native_delete_name,
2298 emit_native_delete_global,
2301 emit_native_label_assign,
2302 emit_native_import_name,
2303 emit_native_import_from,
2304 emit_native_import_star,
2305 emit_native_load_const_tok,
2306 emit_native_load_const_small_int,
2307 emit_native_load_const_str,
2308 emit_native_load_const_obj,
2309 emit_native_load_null,
2310 emit_native_load_attr,
2311 emit_native_load_method,
2312 emit_native_load_build_class,
2313 emit_native_load_subscr,
2314 emit_native_store_attr,
2315 emit_native_store_subscr,
2316 emit_native_delete_attr,
2317 emit_native_delete_subscr,
2318 emit_native_dup_top,
2319 emit_native_dup_top_two,
2320 emit_native_pop_top,
2321 emit_native_rot_two,
2322 emit_native_rot_three,
2324 emit_native_pop_jump_if,
2325 emit_native_jump_if_or_pop,
2326 emit_native_break_loop,
2327 emit_native_continue_loop,
2328 emit_native_setup_with,
2329 emit_native_with_cleanup,
2330 emit_native_setup_except,
2331 emit_native_setup_finally,
2332 emit_native_end_finally,
2333 emit_native_get_iter,
2334 emit_native_for_iter,
2335 emit_native_for_iter_end,
2336 emit_native_pop_block,
2337 emit_native_pop_except,
2338 emit_native_unary_op,
2339 emit_native_binary_op,
2340 emit_native_build_tuple,
2341 emit_native_build_list,
2342 emit_native_build_map,
2343 emit_native_store_map,
2344 #if MICROPY_PY_BUILTINS_SET 2345 emit_native_build_set,
2347 #if MICROPY_PY_BUILTINS_SLICE 2348 emit_native_build_slice,
2350 emit_native_store_comp,
2351 emit_native_unpack_sequence,
2352 emit_native_unpack_ex,
2353 emit_native_make_function,
2354 emit_native_make_closure,
2355 emit_native_call_function,
2356 emit_native_call_method,
2357 emit_native_return_value,
2358 emit_native_raise_varargs,
2359 emit_native_yield_value,
2360 emit_native_yield_from,
2362 emit_native_start_except_handler,
2363 emit_native_end_except_handler,
#define MP_ASM_PASS_COMPUTE
void asm_arm_cmp_reg_reg(asm_arm_t *as, uint rd, uint rn)
void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b)
#define ASM_THUMB_OP_ITE_GT
#define MP_NATIVE_TYPE_PTR
void asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm)
const char * qstr_str(qstr q)
#define ASM_THUMB_OP_ITE_GE
#define MP_SCOPE_FLAG_VARKEYWORDS
NORETURN void mp_raise_NotImplementedError(const char *msg)
void mp_asm_base_label_assign(mp_asm_base_t *as, size_t label)
#define m_del(type, ptr, num)
void asm_x86_setcc_r8(asm_x86_t *as, mp_uint_t jcc_type, int dest_r8)
#define MP_SCOPE_FLAG_VARARGS
#define MP_OBJ_NEW_QSTR(qst)
#define offsetof(type, field)
LIBA_BEGIN_DECLS void free(void *ptr)
#define MP_NATIVE_TYPE_OBJ
void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8)
void mp_asm_base_init(mp_asm_base_t *as, size_t max_num_labels)
void asm_x86_cmp_r32_with_r32(asm_x86_t *as, int src_r32_a, int src_r32_b)
#define MP_EMIT_BREAK_FROM_FOR
enum _mp_token_kind_t mp_token_kind_t
void mp_asm_base_align(mp_asm_base_t *as, unsigned int align)
void asm_arm_setcc_reg(asm_arm_t *as, uint rd, uint cond)
#define MP_NATIVE_TYPE_PTR8
void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32_src)
void mp_asm_base_data(mp_asm_base_t *as, unsigned int bytesize, uintptr_t val)
#define MP_OBJ_NEW_SMALL_INT(small_int)
#define MP_EMIT_NATIVE_TYPE_RETURN
void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun_data, mp_uint_t fun_len, const mp_uint_t *const_table, mp_uint_t n_pos_args, mp_uint_t scope_flags, mp_uint_t type_sig)
#define m_del_obj(type, ptr)
void asm_x86_mov_arg_to_r32(asm_x86_t *as, int src_arg_num, int dest_r32)
void mp_asm_base_start_pass(mp_asm_base_t *as, int pass)
#define MP_NATIVE_TYPE_PTR16
#define ASM_THUMB_OP_ITE_EQ
#define MP_OBJ_ITER_BUF_NSLOTS
void asm_xtensa_setcc_reg_reg_reg(asm_xtensa_t *as, uint cond, uint reg_dest, uint reg_src1, uint reg_src2)
const byte mp_binary_op_method_name[MP_BINARY_OP_NUM_RUNTIME]
void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64)
#define MP_EMIT_NATIVE_TYPE_ENABLE
#define MP_NATIVE_TYPE_UINT
void asm_arm_str_reg_reg_reg(asm_arm_t *as, uint rd, uint rm, uint rn)
#define MP_NATIVE_TYPE_BOOL
void asm_thumb_op16(asm_thumb_t *as, uint op)
#define m_new0(type, num)
#define m_renew(type, ptr, old_num, new_num)
#define DEBUG_printf(...)
const struct _mp_obj_singleton_t mp_const_ellipsis_obj
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp)
void asm_arm_strb_reg_reg_reg(asm_arm_t *as, uint rd, uint rm, uint rn)
#define MP_OBJ_STOP_ITERATION
void asm_x86_xor_r32_r32(asm_x86_t *as, int dest_r32, int src_r32)
const byte mp_unary_op_method_name[MP_UNARY_OP_NUM_RUNTIME]
void *const mp_fun_table[MP_F_NUMBER_OF]
void asm_arm_strh_reg_reg_reg(asm_arm_t *as, uint rd, uint rm, uint rn)
void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp)
void mp_asm_base_deinit(mp_asm_base_t *as, bool free_code)
#define MP_NATIVE_TYPE_INT
#define MP_NATIVE_TYPE_PTR32
void asm_x86_mov_r32_to_local(asm_x86_t *as, int src_r32, int dest_local_num)