Branch data Line data Source code
1 : : /*
2 : : * This file is part of the MicroPython project, http://micropython.org/
3 : : *
4 : : * The MIT License (MIT)
5 : : *
6 : : * Copyright (c) 2013, 2014 Damien P. George
7 : : *
8 : : * Permission is hereby granted, free of charge, to any person obtaining a copy
9 : : * of this software and associated documentation files (the "Software"), to deal
10 : : * in the Software without restriction, including without limitation the rights
11 : : * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 : : * copies of the Software, and to permit persons to whom the Software is
13 : : * furnished to do so, subject to the following conditions:
14 : : *
15 : : * The above copyright notice and this permission notice shall be included in
16 : : * all copies or substantial portions of the Software.
17 : : *
18 : : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 : : * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 : : * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 : : * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 : : * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 : : * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 : : * THE SOFTWARE.
25 : : */
26 : :
27 : : // Essentially, normal Python has 1 type: Python objects.
28 : : // Viper has more than 1 type, and is a more complicated superset of Python.
29 : : // If you declare everything in Viper as a Python object (ie omit type decls) then
30 : : // it should in principle be exactly the same as native Python.
31 : : // Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
32 : : // In practice we won't have a VM but rather do this in asm, which is actually very minimal.
33 : :
34 : : // Because it breaks strict Python equivalence it should be a completely separate
35 : : // decorator. It breaks equivalence because overflow on integers wraps around.
36 : : // It shouldn't break equivalence if you don't use the new types, but since the
37 : : // type decls might be used in normal Python for other reasons, it's probably safest,
38 : : // cleanest and clearest to make it a separate decorator.
39 : :
40 : : // Actually, it does break equivalence because integers default to native integers,
41 : : // not Python objects.
42 : :
43 : : // for x in l[0:8]: can be compiled into a native loop if l has pointer type
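: :
: : // For illustration, a viper function using the extra types might look like
: : // this (a hypothetical sketch, not code from this file):
: :
: : //     @micropython.viper
: : //     def fill(buf: ptr8, n: int):
: : //         for i in range(n):
: : //             buf[i] = 0xFF   # direct 8-bit stores; i and n are native ints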
44 : :
45 : : #include <stdio.h>
46 : : #include <string.h>
47 : : #include <assert.h>
48 : :
49 : : #include "py/emit.h"
50 : : #include "py/nativeglue.h"
51 : : #include "py/objfun.h"
52 : : #include "py/objstr.h"
53 : :
54 : : #if MICROPY_DEBUG_VERBOSE // print debugging info
55 : : #define DEBUG_PRINT (1)
56 : : #define DEBUG_printf DEBUG_printf
57 : : #else // don't print debugging info
58 : : #define DEBUG_printf(...) (void)0
59 : : #endif
60 : :
61 : : // wrapper around everything in this file
62 : : #if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN || N_RV32 || N_DEBUG
63 : :
64 : : // C stack layout for native functions:
65 : : // 0: nlr_buf_t [optional]
66 : : // return_value [optional word]
67 : : // exc_handler_unwind [optional word]
68 : : // emit->code_state_start: mp_code_state_native_t
69 : : // emit->stack_start: Python object stack | emit->n_state
70 : : // locals (reversed, L0 at end) |
71 : : //
72 : : // C stack layout for native generator functions:
73 : : // 0=emit->stack_start: nlr_buf_t
74 : : // return_value
75 : : // exc_handler_unwind [optional word]
76 : : //
77 : : // Then REG_GENERATOR_STATE points to:
78 : : // 0=emit->code_state_start: mp_code_state_native_t
79 : : // emit->stack_start: Python object stack | emit->n_state
80 : : // locals (reversed, L0 at end) |
81 : : //
82 : : // C stack layout for viper functions:
83 : : // 0: nlr_buf_t [optional]
84 : : // return_value [optional word]
85 : : // exc_handler_unwind [optional word]
86 : : // emit->code_state_start: fun_obj, old_globals [optional]
87 : : // emit->stack_start: Python object stack | emit->n_state
88 : : // locals (reversed, L0 at end) |
89 : : // (L0-L2 may be in regs instead)
90 : :
91 : : // Native emitter needs to know the following sizes and offsets of C structs (on the target):
92 : : #if MICROPY_DYNAMIC_COMPILER
93 : : #define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK is enabled
94 : : #else
95 : : #define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
96 : : #endif
97 : : #define SIZEOF_CODE_STATE (sizeof(mp_code_state_native_t) / sizeof(uintptr_t))
98 : : #define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_native_t, state) / sizeof(uintptr_t))
99 : : #define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_native_t, fun_bc) / sizeof(uintptr_t))
100 : : #define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_native_t, ip) / sizeof(uintptr_t))
101 : : #define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_native_t, sp) / sizeof(uintptr_t))
102 : : #define OFFSETOF_CODE_STATE_N_STATE (offsetof(mp_code_state_native_t, n_state) / sizeof(uintptr_t))
103 : : #define OFFSETOF_OBJ_FUN_BC_CONTEXT (offsetof(mp_obj_fun_bc_t, context) / sizeof(uintptr_t))
104 : : #define OFFSETOF_OBJ_FUN_BC_CHILD_TABLE (offsetof(mp_obj_fun_bc_t, child_table) / sizeof(uintptr_t))
105 : : #define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
106 : : #define OFFSETOF_MODULE_CONTEXT_QSTR_TABLE (offsetof(mp_module_context_t, constants.qstr_table) / sizeof(uintptr_t))
107 : : #define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
108 : : #define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))
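: :
: : // Note the divisions by sizeof(uintptr_t): these are word indices, not byte
: : // offsets, because the ASM_LOAD/STORE_REG_REG_OFFSET macros take their offset
: : // argument in machine words. Loading fun_bc->context, for example, amounts to
: : // (a sketch):
: : //
: : //     context = ((uintptr_t *)fun_obj)[OFFSETOF_OBJ_FUN_BC_CONTEXT];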
109 : :
110 : : // If not already defined, set the parent argument registers to be the same as the child call registers
111 : : #ifndef REG_PARENT_RET
112 : : #define REG_PARENT_RET REG_RET
113 : : #define REG_PARENT_ARG_1 REG_ARG_1
114 : : #define REG_PARENT_ARG_2 REG_ARG_2
115 : : #define REG_PARENT_ARG_3 REG_ARG_3
116 : : #define REG_PARENT_ARG_4 REG_ARG_4
117 : : #endif
118 : :
119 : : // Word index of nlr_buf_t.ret_val
120 : : #define NLR_BUF_IDX_RET_VAL (1)
121 : :
122 : : // Whether the viper function needs access to fun_obj
123 : : #define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
124 : : || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))
125 : :
126 : : // Whether the native/viper function needs to be wrapped in an exception handler
127 : : #define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
128 : : || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))
129 : :
130 : : // Whether a slot is needed to store LOCAL_IDX_EXC_HANDLER_UNWIND
131 : : #define NEED_EXC_HANDLER_UNWIND(emit) ((emit)->scope->exc_stack_size > 0)
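: : // Whether a slot is needed to store the value thrown into a generator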
132 : : #define NEED_THROW_VAL(emit) ((emit)->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)
133 : :
134 : : // Whether registers can be used to store locals (only true if there are no
135 : : // exception handlers, because otherwise an nlr_jump will restore registers to
136 : : // their state at the start of the function and updates to locals will be lost)
137 : : #define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))
138 : :
139 : : // Indices within the local C stack for various variables
140 : : #define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
141 : : #define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
142 : : #define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF + 1) // this needs a dedicated variable outside nlr_buf_t
143 : : #define LOCAL_IDX_THROW_VAL(emit) (SIZEOF_NLR_BUF + 2) // needs a dedicated variable outside nlr_buf_t, following inject_exc in py/vm.c
144 : : #define LOCAL_IDX_RET_VAL(emit) (SIZEOF_NLR_BUF) // needed when NEED_GLOBAL_EXC_HANDLER is true
145 : : #define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
146 : : #define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
147 : : #define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
148 : : #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
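: :
: : // Locals are stored in reverse order at the top of the state area (see the
: : // stack layout diagrams above), so local 0 occupies the highest slot:
: : //
: : //     LOCAL_IDX_LOCAL_VAR(emit, 0) == (emit)->stack_start + (emit)->n_state - 1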
149 : :
150 : : #if MICROPY_PERSISTENT_CODE_SAVE
151 : :
152 : : // When building with the ability to save native code to .mpy files:
153 : : // - Qstrs are indirect via qstr_table, and REG_LOCAL_3 always points to qstr_table.
154 : : // - In a generator no registers are used to store locals, and REG_LOCAL_2 points to the generator state.
155 : : // - At most 2 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
156 : :
157 : : #define REG_GENERATOR_STATE (REG_LOCAL_2)
158 : : #define REG_QSTR_TABLE (REG_LOCAL_3)
159 : : #define MAX_REGS_FOR_LOCAL_VARS (2)
160 : :
161 : : static const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2};
162 : :
163 : : #else
164 : :
165 : : // When building without the ability to save native code to .mpy files:
166 : : // - Qstr values are written directly into the machine code.
167 : : // - In a generator no registers are used to store locals, and REG_LOCAL_3 points to the generator state.
168 : : // - At most 3 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
169 : :
170 : : #define REG_GENERATOR_STATE (REG_LOCAL_3)
171 : : #define MAX_REGS_FOR_LOCAL_VARS (3)
172 : :
173 : : static const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
174 : :
175 : : #endif
176 : :
177 : : #define REG_LOCAL_LAST (reg_local_table[MAX_REGS_FOR_LOCAL_VARS - 1])
178 : :
179 : : #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
180 : : *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
181 : : } while (0)
182 : :
183 : : #if N_RV32
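: : // FIT_SIGNED(value, bits) is true iff value fits in a bits-wide signed
: : // immediate, i.e. all bits above the sign bit are 0 or all are 1; for
: : // example, FIT_SIGNED(-2048, 12) holds but FIT_SIGNED(2048, 12) does not.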
184 : : #define FIT_SIGNED(value, bits) \
185 : : ((((value) & ~((1U << ((bits) - 1)) - 1)) == 0) || \
186 : : (((value) & ~((1U << ((bits) - 1)) - 1)) == ~((1U << ((bits) - 1)) - 1)))
187 : : #endif
188 : :
189 : : typedef enum {
190 : : STACK_VALUE,
191 : : STACK_REG,
192 : : STACK_IMM,
193 : : } stack_info_kind_t;
194 : :
195 : : // these enum values must be distinct and the bottom 4 bits
196 : : // must correspond to the correct MP_NATIVE_TYPE_xxx value
197 : : typedef enum {
198 : : VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
199 : : VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
200 : : VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
201 : : VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
202 : : VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
203 : : VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
204 : : VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
205 : : VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,
206 : :
207 : : VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
208 : :
209 : : VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
210 : : VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
211 : : } vtype_kind_t;
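: :
: : // Since the low 4 bits of a vtype_kind_t are its MP_NATIVE_TYPE_xxx value,
: : // the runtime type code can be recovered with a mask (a sketch):
: : //
: : //     int native_type = vtype & 0x0f; // MP_NATIVE_TYPE_OBJ, _BOOL, _INT, ...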
212 : :
213 : 72 : static qstr vtype_to_qstr(vtype_kind_t vtype) {
214 [ - + - + - - + + + ]: 72 : switch (vtype) {
215 : : case VTYPE_PYOBJ:
216 : : return MP_QSTR_object;
217 : 0 : case VTYPE_BOOL:
218 : 0 : return MP_QSTR_bool;
219 : 32 : case VTYPE_INT:
220 : 32 : return MP_QSTR_int;
221 : 0 : case VTYPE_UINT:
222 : 0 : return MP_QSTR_uint;
223 : 8 : case VTYPE_PTR:
224 : 8 : return MP_QSTR_ptr;
225 : 0 : case VTYPE_PTR8:
226 : 0 : return MP_QSTR_ptr8;
227 : 0 : case VTYPE_PTR16:
228 : 0 : return MP_QSTR_ptr16;
229 : 4 : case VTYPE_PTR32:
230 : 4 : return MP_QSTR_ptr32;
231 : 8 : case VTYPE_PTR_NONE:
232 : : default:
233 : 8 : return MP_QSTR_None;
234 : : }
235 : : }
236 : :
237 : : typedef struct _stack_info_t {
238 : : vtype_kind_t vtype;
239 : : stack_info_kind_t kind;
240 : : union {
241 : : int u_reg;
242 : : mp_int_t u_imm;
243 : : } data;
244 : : } stack_info_t;
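: :
: : // A stack_info_t entry records where a value on the Python stack currently
: : // lives:
: : // - STACK_VALUE: settled in its C-stack slot (emit->stack_start + index)
: : // - STACK_REG: held in the machine register data.u_reg
: : // - STACK_IMM: a compile-time constant data.u_imm, not yet materialised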
245 : :
246 : : #define UNWIND_LABEL_UNUSED (0x7fff)
247 : : #define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)
248 : :
249 : : typedef struct _exc_stack_entry_t {
250 : : uint16_t label : 15;
251 : : uint16_t is_finally : 1;
252 : : uint16_t unwind_label : 15;
253 : : uint16_t is_active : 1;
254 : : } exc_stack_entry_t;
255 : :
256 : : struct _emit_t {
257 : : mp_emit_common_t *emit_common;
258 : : mp_obj_t *error_slot;
259 : : uint *label_slot;
260 : : uint exit_label;
261 : : int pass;
262 : :
263 : : bool do_viper_types;
264 : :
265 : : mp_uint_t local_vtype_alloc;
266 : : vtype_kind_t *local_vtype;
267 : :
268 : : mp_uint_t stack_info_alloc;
269 : : stack_info_t *stack_info;
270 : : vtype_kind_t saved_stack_vtype;
271 : :
272 : : size_t exc_stack_alloc;
273 : : size_t exc_stack_size;
274 : : exc_stack_entry_t *exc_stack;
275 : :
276 : : int prelude_offset;
277 : : int prelude_ptr_index;
278 : : int start_offset;
279 : : int n_state;
280 : : uint16_t code_state_start;
281 : : uint16_t stack_start;
282 : : int stack_size;
283 : : uint16_t n_info;
284 : : uint16_t n_cell;
285 : :
286 : : scope_t *scope;
287 : :
288 : : ASM_T *as;
289 : : };
290 : :
291 : : static void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj);
292 : : static void emit_native_global_exc_entry(emit_t *emit);
293 : : static void emit_native_global_exc_exit(emit_t *emit);
294 : : static void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
295 : :
296 : 1483 : emit_t *EXPORT_FUN(new)(mp_emit_common_t * emit_common, mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels) {
297 : 1483 : emit_t *emit = m_new0(emit_t, 1);
298 : 1483 : emit->emit_common = emit_common;
299 : 1483 : emit->error_slot = error_slot;
300 : 1483 : emit->label_slot = label_slot;
301 : 1483 : emit->stack_info_alloc = 8;
302 : 1483 : emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
303 : 1483 : emit->exc_stack_alloc = 8;
304 : 1483 : emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
305 : 1483 : emit->as = m_new0(ASM_T, 1);
306 : 1483 : mp_asm_base_init(&emit->as->base, max_num_labels);
307 : 1483 : return emit;
308 : : }
309 : :
310 : 1467 : void EXPORT_FUN(free)(emit_t * emit) {
311 : 1467 : mp_asm_base_deinit(&emit->as->base, false);
312 : 1467 : m_del_obj(ASM_T, emit->as);
313 : 1467 : m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
314 : 1467 : m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
315 : 1467 : m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
316 : 1467 : m_del_obj(emit_t, emit);
317 : 1467 : }
318 : :
319 : : static void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);
320 : :
321 : 27945 : static void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
322 : 27945 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
323 : 27945 : }
324 : :
325 : 207155 : static void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
326 [ + + ]: 207155 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
327 : 20013 : ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
328 : : } else {
329 : 187142 : ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
330 : : }
331 : 207155 : }
332 : :
333 : 143647 : static void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
334 [ + + ]: 143647 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
335 : 12935 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
336 : : } else {
337 : 130712 : ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
338 : : }
339 : 143647 : }
340 : :
341 : 88972 : static void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
342 [ + + ]: 88972 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
343 : 9567 : ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
344 : 9567 : ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
345 : : } else {
346 : 79405 : ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
347 : : }
348 : 88972 : }
349 : :
350 : 142187 : static void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
351 : : #if MICROPY_PERSISTENT_CODE_SAVE
352 : : ASM_LOAD16_REG_REG_OFFSET(emit->as, arg_reg, REG_QSTR_TABLE, mp_emit_common_use_qstr(emit->emit_common, qst));
353 : : #elif defined(ASM_MOV_REG_QSTR)
354 : : ASM_MOV_REG_QSTR(emit->as, arg_reg, qst);
355 : : #else
356 : 142187 : ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
357 : : #endif
358 : 142187 : }
359 : :
360 : : // This function may clobber REG_TEMP0 (and `reg_dest` can be REG_TEMP0).
361 : 25077 : static void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
362 : : #if MICROPY_PERSISTENT_CODE_SAVE
363 : : emit_load_reg_with_object(emit, reg_dest, MP_OBJ_NEW_QSTR(qst));
364 : : #else
365 : 25077 : ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
366 : : #endif
367 : 25077 : }
368 : :
369 : : #define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
370 : : do { \
371 : : ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
372 : : emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
373 : : } while (false)
374 : :
375 : 13889 : static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
376 : 13889 : DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
377 : :
378 : 13889 : emit->pass = pass;
379 : 13889 : emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
380 : 13889 : emit->stack_size = 0;
381 : 13889 : emit->scope = scope;
382 : :
383 : : // allocate memory for keeping track of the types of locals
384 [ + + ]: 13889 : if (emit->local_vtype_alloc < scope->num_locals) {
385 : 796 : emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
386 : 796 : emit->local_vtype_alloc = scope->num_locals;
387 : : }
388 : :
389 : : // set default type for arguments
390 : 13889 : mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
391 [ + + ]: 13889 : if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
392 : 168 : num_args += 1;
393 : : }
394 [ + + ]: 13889 : if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
395 : 75 : num_args += 1;
396 : : }
397 [ + + ]: 24785 : for (mp_uint_t i = 0; i < num_args; i++) {
398 : 10896 : emit->local_vtype[i] = VTYPE_PYOBJ;
399 : : }
400 : :
401 : : // Set viper type for arguments
402 [ + + ]: 13889 : if (emit->do_viper_types) {
403 [ + + ]: 2456 : for (int i = 0; i < emit->scope->id_info_len; ++i) {
404 : 1734 : id_info_t *id = &emit->scope->id_info[i];
405 [ + + ]: 1734 : if (id->flags & ID_FLAG_IS_PARAM) {
406 [ - + ]: 996 : assert(id->local_num < emit->local_vtype_alloc);
407 : 996 : emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
408 : : }
409 : : }
410 : : }
411 : :
412 : : // local variables begin unbound, and have unknown type
413 [ + + ]: 42084 : for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
414 [ + + ]: 55822 : emit->local_vtype[i] = emit->do_viper_types ? VTYPE_UNBOUND : VTYPE_PYOBJ;
415 : : }
416 : :
417 : : // values on stack begin unbound
418 [ + + ]: 138017 : for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
419 : 124128 : emit->stack_info[i].kind = STACK_VALUE;
420 : 124128 : emit->stack_info[i].vtype = VTYPE_UNBOUND;
421 : : }
422 : :
423 [ + + ]: 23179 : mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
424 : :
425 : : // generate code for entry to function
426 : :
427 : : // Work out start of code state (mp_code_state_native_t or reduced version for viper)
428 : 13889 : emit->code_state_start = 0;
429 [ + + + + ]: 13889 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
430 : 9789 : emit->code_state_start = SIZEOF_NLR_BUF; // for nlr_buf_t
431 : 9789 : emit->code_state_start += 1; // for return_value
432 [ + + ]: 9789 : if (NEED_THROW_VAL(emit)) {
433 : 1031 : emit->code_state_start += 2;
434 [ + + ]: 8758 : } else if (NEED_EXC_HANDLER_UNWIND(emit)) {
435 : 2259 : emit->code_state_start += 1;
436 : : }
437 : : }
438 : :
439 : 13889 : size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
440 : :
441 [ + + ]: 13889 : if (emit->do_viper_types) {
442 : : // Work out size of state (locals plus stack)
443 : : // n_state counts all stack and locals, even those in registers
444 : 722 : emit->n_state = scope->num_locals + scope->stack_size;
445 : 722 : int num_locals_in_regs = 0;
446 [ + + + + ]: 722 : if (CAN_USE_REGS_FOR_LOCALS(emit)) {
447 : 684 : num_locals_in_regs = scope->num_locals;
448 [ + + ]: 684 : if (num_locals_in_regs > MAX_REGS_FOR_LOCAL_VARS) {
449 : 120 : num_locals_in_regs = MAX_REGS_FOR_LOCAL_VARS;
450 : : }
451 : : // Need a spot for REG_LOCAL_LAST (see below)
452 [ + + ]: 684 : if (scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1) {
453 : 24 : --num_locals_in_regs;
454 : : }
455 : : }
456 : :
457 : : // Work out where the locals and Python stack start within the C stack
458 [ + + + + ]: 722 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
459 : : // Reserve 2 words for function object and old globals
460 : 288 : emit->stack_start = emit->code_state_start + 2;
461 [ + + ]: 434 : } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
462 : : // Reserve 1 word for function object, to access const table
463 : 12 : emit->stack_start = emit->code_state_start + 1;
464 : : } else {
465 : 422 : emit->stack_start = emit->code_state_start + 0;
466 : : }
467 : :
468 : : // Entry to function
469 : 722 : ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);
470 : :
471 : : #if N_X86
472 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
473 : : #endif
474 : :
475 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
476 : 722 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
477 : : #if MICROPY_PERSISTENT_CODE_SAVE
478 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
479 : : #endif
480 : 722 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
481 : 722 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
482 : :
483 : : // Store function object (passed as first arg) to stack if needed
484 [ + + + + ]: 722 : if (NEED_FUN_OBJ(emit)) {
485 : 296 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
486 : : }
487 : :
488 : : // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_LAST
489 : : #if N_X86
490 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
491 : : asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
492 : : asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_LAST);
493 : : #else
494 : 722 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
495 : 722 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
496 : 722 : ASM_MOV_REG_REG(emit->as, REG_LOCAL_LAST, REG_PARENT_ARG_4);
497 : : #endif
498 : :
499 : : // Check number of args matches this function, and call mp_arg_check_num_sig if not
500 : 722 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
501 : 722 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
502 : 722 : ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
503 : 722 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
504 : 722 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
505 : 722 : ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
506 : 722 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);
507 : :
508 : : // Store arguments into locals (reg or stack), converting to native if needed
509 [ + + ]: 1706 : for (int i = 0; i < emit->scope->num_pos_args; i++) {
510 : 984 : int r = REG_ARG_1;
511 : 984 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_LAST, i);
512 [ + + ]: 984 : if (emit->local_vtype[i] != VTYPE_PYOBJ) {
513 : 876 : emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
514 : 876 : r = REG_RET;
515 : : }
516 : : // REG_LOCAL_LAST points to the args array so be sure not to overwrite it if it's still needed
517 [ + + + - + - + + + + ]: 984 : if (i < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit) && (i != MAX_REGS_FOR_LOCAL_VARS - 1 || emit->scope->num_pos_args == MAX_REGS_FOR_LOCAL_VARS)) {
518 : 918 : ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
519 : : } else {
520 : 66 : emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
521 : : }
522 : : }
523 : : // Get local from the stack back into REG_LOCAL_LAST if this reg couldn't be written to above
524 [ + + + - + - ]: 722 : if (emit->scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1 && CAN_USE_REGS_FOR_LOCALS(emit)) {
525 : 24 : ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_LAST, LOCAL_IDX_LOCAL_VAR(emit, MAX_REGS_FOR_LOCAL_VARS - 1));
526 : : }
527 : :
528 : 722 : emit_native_global_exc_entry(emit);
529 : :
530 : : } else {
531 : : // work out size of state (locals plus stack)
532 : 13167 : emit->n_state = scope->num_locals + scope->stack_size;
533 : :
534 : : // Store in the first machine word an index used to locate the function's prelude.
535 : : // This is used at runtime by mp_obj_fun_native_get_prelude_ptr().
536 : 13167 : mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_ptr_index);
537 : :
538 [ + + ]: 13167 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
539 : 1023 : mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
540 : 1023 : ASM_ENTRY(emit->as, emit->code_state_start);
541 : :
542 : : // Reset the state size for the state pointed to by REG_GENERATOR_STATE
543 : 1023 : emit->code_state_start = 0;
544 : 1023 : emit->stack_start = SIZEOF_CODE_STATE;
545 : :
546 : : // Put address of code_state into REG_GENERATOR_STATE
547 : : #if N_X86
548 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
549 : : #else
550 : 1023 : ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
551 : : #endif
552 : :
553 : : // Put throw value into LOCAL_IDX_THROW_VAL slot, for yield/yield-from
554 : : #if N_X86
555 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
556 : : #endif
557 : 1023 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_PARENT_ARG_2);
558 : :
559 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
560 : 1023 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
561 : 1023 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
562 : : #if MICROPY_PERSISTENT_CODE_SAVE
563 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
564 : : #endif
565 : 1023 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
566 : 1023 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
567 : : } else {
568 : : // The locals and stack start after the code_state structure
569 : 12144 : emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
570 : :
571 : : // Allocate space on C-stack for code_state structure, which includes state
572 : 12144 : ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
573 : :
574 : : // Prepare incoming arguments for call to mp_setup_code_state
575 : :
576 : : #if N_X86
577 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
578 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
579 : : asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
580 : : asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
581 : : #endif
582 : :
583 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
584 : 12144 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
585 : : #if MICROPY_PERSISTENT_CODE_SAVE
586 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
587 : : #endif
588 : 12144 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
589 : 12144 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
590 : :
591 : : // Set code_state.fun_bc
592 : 12144 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
593 : :
594 : : // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
595 : 12144 : emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_N_STATE, emit->n_state, REG_ARG_1);
596 : :
597 : : // Put address of code_state into first arg
598 : 12144 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
599 : :
600 : : // Copy next 3 args if needed
601 : : #if REG_ARG_2 != REG_PARENT_ARG_2
602 : : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
603 : : #endif
604 : : #if REG_ARG_3 != REG_PARENT_ARG_3
605 : : ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
606 : : #endif
607 : : #if REG_ARG_4 != REG_PARENT_ARG_4
608 : : ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
609 : : #endif
610 : :
611 : : // Call mp_setup_code_state to prepare code_state structure
612 : : #if N_THUMB
613 : : asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
614 : : #elif N_ARM
615 : : asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
616 : : #else
617 : 12144 : ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
618 : : #endif
619 : : }
620 : :
621 : 13167 : emit_native_global_exc_entry(emit);
622 : :
623 : : // cache some locals in registers, but only if no exception handlers
624 [ + + + + ]: 13167 : if (CAN_USE_REGS_FOR_LOCALS(emit)) {
625 [ + + + + ]: 19251 : for (int i = 0; i < MAX_REGS_FOR_LOCAL_VARS && i < scope->num_locals; ++i) {
626 : 9336 : ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
627 : : }
628 : : }
629 : :
630 : : // set the type of closed over variables
631 [ + + ]: 72870 : for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
632 : 59703 : id_info_t *id = &scope->id_info[i];
633 [ + + ]: 59703 : if (id->kind == ID_INFO_KIND_CELL) {
634 : 327 : emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
635 : : }
636 : : }
637 : : }
638 : 13889 : }
639 : :
640 : 98453 : static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
641 : 98453 : mp_asm_base_data(&emit->as->base, 1, val);
642 : : }
643 : :
644 : 22836 : static inline void emit_native_write_code_info_qstr(emit_t *emit, qstr qst) {
645 : 22836 : mp_encode_uint(&emit->as->base, mp_asm_base_get_cur_to_write_bytes, mp_emit_common_use_qstr(emit->emit_common, qst));
646 : 22836 : }
647 : :
648 : 13873 : static bool emit_native_end_pass(emit_t *emit) {
649 : 13873 : emit_native_global_exc_exit(emit);
650 : :
651 [ + + ]: 13873 : if (!emit->do_viper_types) {
652 : 13167 : emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
653 : 13167 : emit->prelude_ptr_index = emit->emit_common->ct_cur_child;
654 : :
655 : 13167 : size_t n_state = emit->n_state;
656 : 13167 : size_t n_exc_stack = 0; // exc-stack not needed for native code
657 [ + + ]: 85226 : MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);
658 : :
659 : 13167 : size_t n_info = emit->n_info;
660 : 13167 : size_t n_cell = emit->n_cell;
661 [ + + + + ]: 13227 : MP_BC_PRELUDE_SIZE_ENCODE(n_info, n_cell, emit_native_write_code_info_byte, emit);
662 : :
663 : : // bytecode prelude: source info (function and argument qstrs)
664 : 13167 : size_t info_start = mp_asm_base_get_code_pos(&emit->as->base);
665 : 13167 : emit_native_write_code_info_qstr(emit, emit->scope->simple_name);
666 [ + + ]: 22836 : for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
667 : 17766 : qstr qst = MP_QSTR__star_;
668 [ + + ]: 17766 : for (int j = 0; j < emit->scope->id_info_len; ++j) {
669 : 17097 : id_info_t *id = &emit->scope->id_info[j];
670 [ + + + + ]: 17097 : if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
671 : 9000 : qst = id->qst;
672 : 9000 : break;
673 : : }
674 : : }
675 : 9669 : emit_native_write_code_info_qstr(emit, qst);
676 : : }
677 : 13167 : emit->n_info = mp_asm_base_get_code_pos(&emit->as->base) - info_start;
678 : :
679 : : // bytecode prelude: initialise closed over variables
680 : 13167 : size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
681 [ + + ]: 72870 : for (int i = 0; i < emit->scope->id_info_len; i++) {
682 : 59703 : id_info_t *id = &emit->scope->id_info[i];
683 [ + + ]: 59703 : if (id->kind == ID_INFO_KIND_CELL) {
684 [ - + ]: 327 : assert(id->local_num <= 255);
685 : 327 : mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
686 : : }
687 : : }
688 : 13167 : emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;
689 : :
690 : : }
691 : :
692 : 13873 : ASM_END_PASS(emit->as);
693 : :
694 : : // check stack is back to zero size
695 [ - + ]: 13873 : assert(emit->stack_size == 0);
696 [ - + ]: 13873 : assert(emit->exc_stack_size == 0);
697 : :
698 [ + + ]: 13873 : if (emit->pass == MP_PASS_EMIT) {
699 [ + + ]: 4599 : void *f = mp_asm_base_get_code(&emit->as->base);
700 [ + + ]: 4599 : mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
701 : :
702 : 4599 : mp_raw_code_t **children = emit->emit_common->children;
703 [ + + ]: 4599 : if (!emit->do_viper_types) {
704 : : #if MICROPY_EMIT_NATIVE_PRELUDE_SEPARATE_FROM_MACHINE_CODE
705 : : // Executable code cannot be accessed byte-wise on this architecture, so copy
706 : : // the prelude to a separate memory region that is byte-wise readable.
707 : : void *buf = emit->as->base.code_base + emit->prelude_offset;
708 : : size_t n = emit->as->base.code_offset - emit->prelude_offset;
709 : : const uint8_t *prelude_ptr = memcpy(m_new(uint8_t, n), buf, n);
710 : : #else
711 : : // Point to the prelude directly, at the end of the machine code data.
712 : 4389 : const uint8_t *prelude_ptr = (const uint8_t *)f + emit->prelude_offset;
713 : : #endif
714 : :
715 : : // Store the pointer to the prelude using the child_table.
716 [ - + ]: 4389 : assert(emit->prelude_ptr_index == emit->emit_common->ct_cur_child);
717 [ + + ]: 4389 : if (emit->prelude_ptr_index == 0) {
718 : : children = (void *)prelude_ptr;
719 : : } else {
720 : 1050 : children = m_renew(mp_raw_code_t *, children, emit->prelude_ptr_index, emit->prelude_ptr_index + 1);
721 : 1050 : children[emit->prelude_ptr_index] = (void *)prelude_ptr;
722 : : }
723 : : }
724 : :
725 : 4599 : mp_emit_glue_assign_native(emit->scope->raw_code,
726 : 4599 : emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
727 : : f, f_len,
728 : : children,
729 : : #if MICROPY_PERSISTENT_CODE_SAVE
730 : : emit->emit_common->ct_cur_child,
731 : : emit->prelude_offset,
732 : : #endif
733 [ + + ]: 4599 : emit->scope->scope_flags, 0, 0);
734 : : }
735 : :
736 : 13873 : return true;
737 : : }
738 : :
739 : 392817 : static void ensure_extra_stack(emit_t *emit, size_t delta) {
740 [ + + ]: 392817 : if (emit->stack_size + delta > emit->stack_info_alloc) {
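: : // Round (needed size + 8) down to a multiple of 4; this always leaves
: : // at least 5 spare slots beyond the needed size.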
741 : 362 : size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
742 : 362 : emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
743 : 362 : emit->stack_info_alloc = new_alloc;
744 : : }
745 : 392817 : }
746 : :
747 : 742204 : static void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
748 [ - + ]: 742204 : assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
749 [ - + ]: 742204 : assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
750 : 742204 : emit->stack_size += stack_size_delta;
751 [ + - + + ]: 742204 : if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
752 : 17007 : emit->scope->stack_size = emit->stack_size;
753 : : }
754 : : #if DEBUG_PRINT
755 : : DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
756 : : for (int i = 0; i < emit->stack_size; i++) {
757 : : stack_info_t *si = &emit->stack_info[i];
758 : : DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
759 : : }
760 : : DEBUG_printf("\n");
761 : : #endif
762 : 742204 : }
763 : :
764 : 10893 : static void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
765 : 10893 : DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
766 [ + + ]: 10893 : if (delta > 0) {
767 : 9198 : ensure_extra_stack(emit, delta);
768 : : }
769 : : // If we are adjusting the stack in a positive direction (pushing) then we
770 : : // need to fill in values for the stack kind and vtype of the newly-pushed
771 : : // entries. These should be set to "value" (ie not reg or imm) because we
772 : : // should only need to adjust the stack due to a jump to this part in the
773 : : // code (and hence we have settled the stack before the jump).
774 [ + + ]: 21177 : for (mp_int_t i = 0; i < delta; i++) {
775 : 10284 : stack_info_t *si = &emit->stack_info[emit->stack_size + i];
776 : 10284 : si->kind = STACK_VALUE;
777 : : // TODO we don't know the vtype to use here. At the moment this is a
778 : : // hack to get the case of multi comparison working.
779 [ + + ]: 10284 : if (delta == 1) {
780 : 8517 : si->vtype = emit->saved_stack_vtype;
781 : : } else {
782 : 1767 : si->vtype = VTYPE_PYOBJ;
783 : : }
784 : : }
785 : 10893 : adjust_stack(emit, delta);
786 : 10893 : }
787 : :
788 : 260699 : static void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
789 : 260699 : (void)emit;
790 : 260699 : (void)source_line;
791 : 260699 : }
792 : :
793 : : // this must be called at start of emit functions
794 : : static void emit_native_pre(emit_t *emit) {
795 : : (void)emit;
796 : : }
797 : :
798 : : // depth==0 is top, depth==1 is before top, etc
799 : 13324 : static stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
800 : 13324 : return &emit->stack_info[emit->stack_size - 1 - depth];
801 : : }
802 : :
803 : : // depth==0 is top, depth==1 is before top, etc
804 : 114843 : static vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
805 [ + + ]: 114843 : if (emit->do_viper_types) {
806 : 4080 : return peek_stack(emit, depth)->vtype;
807 : : } else {
808 : : // Type is always PYOBJ even if the intermediate stored value is not
809 : : return VTYPE_PYOBJ;
810 : : }
811 : : }
812 : :
813 : : // pos=1 is TOS, pos=2 is next, etc
814 : : // use pos=0 for no skipping
815 : 277982 : static void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
816 : 277982 : skip_stack_pos = emit->stack_size - skip_stack_pos;
817 [ + + ]: 886062 : for (int i = 0; i < emit->stack_size; i++) {
818 [ + + ]: 608080 : if (i != skip_stack_pos) {
819 : 382556 : stack_info_t *si = &emit->stack_info[i];
820 [ + + + + ]: 382556 : if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
821 : 34887 : si->kind = STACK_VALUE;
822 : 34887 : emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
823 : : }
824 : : }
825 : : }
826 : 277982 : }
827 : :
828 : : // Ensures all unsettled registers that hold Python values are copied to the
829 : : // concrete Python stack. All registers are then free to use.
830 : 556535 : static void need_reg_all(emit_t *emit) {
831 [ + + ]: 1258588 : for (int i = 0; i < emit->stack_size; i++) {
832 : 702053 : stack_info_t *si = &emit->stack_info[i];
833 [ + + ]: 702053 : if (si->kind == STACK_REG) {
834 : 111348 : DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
835 : 111348 : si->kind = STACK_VALUE;
836 : 111348 : emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
837 : : }
838 : : }
839 : 556535 : }
840 : :
841 : 64365 : static vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
842 [ + + + + ]: 64365 : if (!convert_to_pyobj && emit->do_viper_types) {
843 : 644 : ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
844 : 644 : return si->vtype;
845 : : } else {
846 [ + + ]: 63721 : if (si->vtype == VTYPE_PYOBJ) {
847 : 1035 : ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
848 [ + + ]: 62686 : } else if (si->vtype == VTYPE_BOOL) {
849 : 2025 : emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
850 [ + + ]: 60661 : } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
851 : 35994 : ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
852 [ + + ]: 24667 : } else if (si->vtype == VTYPE_PTR_NONE) {
853 : 24663 : emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
854 : : } else {
855 : 4 : mp_raise_NotImplementedError(MP_ERROR_TEXT("conversion to object"));
856 : : }
857 : 63717 : return VTYPE_PYOBJ;
858 : : }
859 : : }
860 : :
861 : : // Copies all unsettled registers and immediates that are Python values into the
862 : : // concrete Python stack. This ensures the concrete Python stack holds valid
863 : : // values for the current stack_size.
864 : : // This function may clobber REG_TEMP1.
865 : 130827 : static void need_stack_settled(emit_t *emit) {
866 : 130827 : DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
867 : 130827 : need_reg_all(emit);
868 [ + + ]: 215679 : for (int i = 0; i < emit->stack_size; i++) {
869 : 84852 : stack_info_t *si = &emit->stack_info[i];
870 [ + + ]: 84852 : if (si->kind == STACK_IMM) {
871 : 3558 : DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
872 : 3558 : si->kind = STACK_VALUE;
873 : : // using REG_TEMP1 to avoid clobbering REG_TEMP0 (aka REG_RET)
874 : 3558 : si->vtype = load_reg_stack_imm(emit, REG_TEMP1, si, false);
875 : 3558 : emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP1);
876 : : }
877 : : }
878 : 130827 : }
879 : :
880 : : // pos=1 is TOS, pos=2 is next, etc
881 : 219010 : static void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
882 : 219010 : need_reg_single(emit, reg_dest, pos);
883 : 219010 : stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
884 : 219010 : *vtype = si->vtype;
885 [ + + + - ]: 219010 : switch (si->kind) {
886 : 70610 : case STACK_VALUE:
887 : 70610 : emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
888 : 70610 : break;
889 : :
890 : 113367 : case STACK_REG:
891 [ + + ]: 113367 : if (si->data.u_reg != reg_dest) {
892 : 106101 : ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
893 : : }
894 : : break;
895 : :
896 : 35033 : case STACK_IMM:
897 : 35033 : *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
898 : 35033 : break;
899 : : }
900 : 219010 : }
901 : :
902 : : // does an efficient X=pop(); discard(); push(X)
903 : : // needs a (non-temp) register in case the popped element was stored in the stack
904 : 938 : static void emit_fold_stack_top(emit_t *emit, int reg_dest) {
905 : 938 : stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
906 : 938 : si[0] = si[1];
907 [ + + ]: 938 : if (si->kind == STACK_VALUE) {
908 : : // if folded element was on the stack we need to put it in a register
909 : 924 : emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
910 : 924 : si->kind = STACK_REG;
911 : 924 : si->data.u_reg = reg_dest;
912 : : }
913 : 938 : adjust_stack(emit, -1);
914 : 938 : }
915 : :
916 : : // If the stacked value is in a register, and that register is neither not_r1 nor
917 : : // not_r2, then *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
918 : 8874 : static void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
919 : 8874 : stack_info_t *si = peek_stack(emit, 0);
920 [ + + + + + - ]: 8874 : if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
921 : 6514 : *vtype = si->vtype;
922 : 6514 : *reg_dest = si->data.u_reg;
923 : 6514 : need_reg_single(emit, *reg_dest, 1);
924 : : } else {
925 : 2360 : emit_access_stack(emit, 1, vtype, *reg_dest);
926 : : }
927 : 8874 : adjust_stack(emit, -1);
928 : 8874 : }
929 : :
930 : 50274 : static void emit_pre_pop_discard(emit_t *emit) {
931 : 50274 : adjust_stack(emit, -1);
932 : 50274 : }
933 : :
934 : 213146 : static void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
935 : 213146 : emit_access_stack(emit, 1, vtype, reg_dest);
936 : 213146 : adjust_stack(emit, -1);
937 : 213146 : }
938 : :
939 : 25946 : static void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
940 : 25946 : emit_pre_pop_reg(emit, vtypea, rega);
941 : 25946 : emit_pre_pop_reg(emit, vtypeb, regb);
942 : 25946 : }
943 : :
944 : 4104 : static void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
945 : 4104 : emit_pre_pop_reg(emit, vtypea, rega);
946 : 4104 : emit_pre_pop_reg(emit, vtypeb, regb);
947 : 4104 : emit_pre_pop_reg(emit, vtypec, regc);
948 : 4104 : }
949 : :
950 : : static void emit_post(emit_t *emit) {
951 : : (void)emit;
952 : : }
953 : :
954 : 14 : static void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
955 : 14 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
956 : 14 : si->vtype = new_vtype;
957 : 14 : }
958 : :
959 : 300900 : static void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
960 : 300900 : ensure_extra_stack(emit, 1);
961 : 300900 : stack_info_t *si = &emit->stack_info[emit->stack_size];
962 : 300900 : si->vtype = vtype;
963 : 300900 : si->kind = STACK_REG;
964 : 300900 : si->data.u_reg = reg;
965 : 300900 : adjust_stack(emit, 1);
966 : 300900 : }
967 : :
968 : 64251 : static void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
969 : 64251 : ensure_extra_stack(emit, 1);
970 : 64251 : stack_info_t *si = &emit->stack_info[emit->stack_size];
971 : 64251 : si->vtype = vtype;
972 : 64251 : si->kind = STACK_IMM;
973 : 64251 : si->data.u_imm = imm;
974 : 64251 : adjust_stack(emit, 1);
975 : 64251 : }
976 : :
977 : 7227 : static void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
978 : 7227 : emit_post_push_reg(emit, vtypea, rega);
979 : 7227 : emit_post_push_reg(emit, vtypeb, regb);
980 : 7227 : }
981 : :
982 : 336 : static void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
983 : 336 : emit_post_push_reg(emit, vtypea, rega);
984 : 336 : emit_post_push_reg(emit, vtypeb, regb);
985 : 336 : emit_post_push_reg(emit, vtypec, regc);
986 : 336 : }
987 : :
988 : 450 : static void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
989 : 450 : emit_post_push_reg(emit, vtypea, rega);
990 : 450 : emit_post_push_reg(emit, vtypeb, regb);
991 : 450 : emit_post_push_reg(emit, vtypec, regc);
992 : 450 : emit_post_push_reg(emit, vtyped, regd);
993 : 450 : }
994 : :
995 : 88458 : static void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
996 : 88458 : need_reg_all(emit);
997 : 88458 : ASM_CALL_IND(emit->as, fun_kind);
998 : 88458 : }
999 : :
1000 : 79537 : static void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
1001 : 79537 : need_reg_all(emit);
1002 : 79537 : ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
1003 : 79537 : ASM_CALL_IND(emit->as, fun_kind);
1004 : 79537 : }
1005 : :
1006 : 17175 : static void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
1007 : 17175 : need_reg_all(emit);
1008 : 17175 : ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
1009 : 17175 : ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
1010 : 17175 : ASM_CALL_IND(emit->as, fun_kind);
1011 : 17175 : }
1012 : :
1013 : 142187 : static void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
1014 : 142187 : need_reg_all(emit);
1015 : 142187 : emit_native_mov_reg_qstr(emit, arg_reg, qst);
1016 : 142187 : ASM_CALL_IND(emit->as, fun_kind);
1017 : 142187 : }
1018 : :
1019 : : // vtype of all n_pop objects is VTYPE_PYOBJ
1020 : : // Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
1021 : : // If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
1022 : : // Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
1023 : 70508 : static void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
1024 : 70508 : need_reg_all(emit);
1025 : :
1026 : : // First, store any immediate values to their respective place on the stack.
1027 [ + + ]: 209970 : for (mp_uint_t i = 0; i < n_pop; i++) {
1028 : 139466 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1029 : : // must push any imm's to stack
1030 : : // must convert them to VTYPE_PYOBJ for viper code
1031 [ + + ]: 139466 : if (si->kind == STACK_IMM) {
1032 : 25774 : si->kind = STACK_VALUE;
1033 : 25774 : si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
1034 : 25770 : emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
1035 : : }
1036 : :
1037 : : // verify that this value is on the stack
1038 [ - + ]: 139462 : assert(si->kind == STACK_VALUE);
1039 : : }
1040 : :
1041 : : // Second, convert any non-VTYPE_PYOBJ to that type.
1042 [ + + ]: 209966 : for (mp_uint_t i = 0; i < n_pop; i++) {
1043 : 139462 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1044 [ + + ]: 139462 : if (si->vtype != VTYPE_PYOBJ) {
1045 : 544 : mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
1046 : 544 : emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
1047 : 544 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
1048 : 544 : emit_native_mov_state_reg(emit, local_num, REG_RET);
1049 : 544 : si->vtype = VTYPE_PYOBJ;
1050 : 139462 : DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
1051 : : }
1052 : : }
1053 : :
1054 : : // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
1055 : 70504 : adjust_stack(emit, -n_pop);
1056 : 70504 : emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1057 : 70504 : }
1058 : :
1059 : : // vtype of all n_push objects is VTYPE_PYOBJ
1060 : 18468 : static void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
1061 : 18468 : need_reg_all(emit);
1062 : 18468 : ensure_extra_stack(emit, n_push);
1063 [ + + ]: 58251 : for (mp_uint_t i = 0; i < n_push; i++) {
1064 : 39783 : emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
1065 : 39783 : emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
1066 : : }
1067 : 18468 : emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1068 : 18468 : adjust_stack(emit, n_push);
1069 : 18468 : }
1070 : :
1071 : 6201 : static void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
1072 [ + + ]: 6201 : if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
1073 : 6 : size_t new_alloc = emit->exc_stack_alloc + 4;
1074 : 6 : emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
1075 : 6 : emit->exc_stack_alloc = new_alloc;
1076 : : }
1077 : :
1078 : 6201 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
1079 : 6201 : e->label = label;
1080 : 6201 : e->is_finally = is_finally;
1081 : 6201 : e->unwind_label = UNWIND_LABEL_UNUSED;
1082 : 6201 : e->is_active = true;
1083 : :
1084 : 6201 : ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
1085 : 6201 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1086 : 6201 : }
1087 : :
1088 : 11304 : static void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
1089 [ - + ]: 11304 : assert(emit->exc_stack_size > 0);
1090 : :
1091 : : // Get current exception handler and deactivate it
1092 : 11304 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1093 : 11304 : e->is_active = false;
1094 : :
1095 : : // Find next innermost active exception handler, to restore as current handler
1096 [ + + + + ]: 12135 : for (--e; e >= emit->exc_stack && !e->is_active; --e) {
1097 : 831 : }
1098 : :
1099 : : // Update the PC of the new exception handler
1100 [ + + ]: 11304 : if (e < emit->exc_stack) {
1101 : : // No active handler, clear handler PC to zero
1102 [ + + ]: 10755 : if (start_of_handler) {
1103 : : // Optimisation: PC is already cleared by global exc handler
1104 : : return;
1105 : : }
1106 : 5835 : ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
1107 : : } else {
1108 : : // Found new active handler, get its PC
1109 : 549 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
1110 : : }
1111 : 6384 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1112 : : }
1113 : :
1114 : 6201 : static exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
1115 [ - + ]: 6201 : assert(emit->exc_stack_size > 0);
1116 : 6201 : exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
1117 [ - + ]: 6201 : assert(e->is_active == false);
1118 : 6201 : return e;
1119 : : }
1120 : :
1121 : : // This function will clobber REG_TEMP0 (and `reg` can be REG_TEMP0).
1122 : 11514 : static void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
1123 : 11514 : emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
1124 : 11514 : size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
1125 : 11514 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
1126 : 11514 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
1127 : 11514 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
1128 : 11514 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
1129 : 11514 : }
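: :
: : // The loads above chase fun_obj->context->constants.obj_table[table_off];
: : // in effect (a C sketch):
: :
: : //     reg = ((mp_obj_fun_bc_t *)fun_obj)->context->constants.obj_table[table_off];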
1130 : :
1131 : 9375 : static void emit_load_reg_with_child(emit_t *emit, int reg, mp_raw_code_t *rc) {
1132 [ + + ]: 9375 : size_t table_off = mp_emit_common_alloc_const_child(emit->emit_common, rc);
1133 : 9375 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
1134 : 9375 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
1135 : 9375 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
1136 : 9375 : }
1137 : :
1138 : 77613 : static void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
1139 : 77613 : DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
1140 : :
1141 : 77613 : bool is_finally = false;
1142 [ + + ]: 77613 : if (emit->exc_stack_size > 0) {
1143 : 15378 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1144 [ + + + + ]: 15378 : is_finally = e->is_finally && e->label == l;
1145 : : }
1146 : :
1147 : 1098 : if (is_finally) {
1148 : : // Label is at start of finally handler: store TOS into exception slot
1149 : 1098 : vtype_kind_t vtype;
1150 : 1098 : emit_access_stack(emit, 1, &vtype, REG_TEMP0);
1151 : 1098 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
1152 : : }
1153 : :
1154 : 77613 : emit_native_pre(emit);
1155 : : // need to commit stack because we can jump here from elsewhere
1156 : 77613 : need_stack_settled(emit);
1157 : 77613 : mp_asm_base_label_assign(&emit->as->base, l);
1158 : 77613 : emit_post(emit);
1159 : :
1160 [ + + ]: 77613 : if (is_finally) {
1161 : : // Label is at start of finally handler: pop exception stack
1162 : 1098 : emit_native_leave_exc_stack(emit, false);
1163 : : }
1164 : 77613 : }
1165 : :
1166 : 13889 : static void emit_native_global_exc_entry(emit_t *emit) {
1167 : : // Note: 4 labels are reserved for this function, starting at *emit->label_slot
1168 : :
1169 : 13889 : emit->exit_label = *emit->label_slot;
1170 : :
1171 [ + + + + ]: 13889 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1172 : 9789 : mp_uint_t nlr_label = *emit->label_slot + 1;
1173 : 9789 : mp_uint_t start_label = *emit->label_slot + 2;
1174 : 9789 : mp_uint_t global_except_label = *emit->label_slot + 3;
1175 : :
1176 [ + + ]: 9789 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1177 : : // Set new globals
1178 : 8758 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
1179 : 8758 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
1180 : 8758 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_MODULE_CONTEXT_GLOBALS);
1181 : 8758 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1182 : :
1183 : : // Save old globals (or NULL if globals didn't change)
1184 : 8758 : emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
1185 : : }
1186 : :
1187 [ + + ]: 9789 : if (emit->scope->exc_stack_size == 0) {
1188 [ + + ]: 7236 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1189 : : // Optimisation: if globals didn't change, don't push the nlr context
1190 : 6499 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
1191 : : }
1192 : :
1193 : : // Wrap everything in an nlr context
1194 : 7236 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1195 : 7236 : emit_call(emit, MP_F_NLR_PUSH);
1196 : : #if N_NLR_SETJMP
1197 : : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1198 : : emit_call(emit, MP_F_SETJMP);
1199 : : #endif
1200 : 7236 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
1201 : : } else {
1202 : : // Clear the unwind state
1203 : 2553 : ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1204 : 2553 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
1205 : :
1206 : : // clear nlr.ret_val, because it's passed to mp_native_raise regardless
1207 : : // of whether there was an exception or not
1208 : 2553 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
1209 : :
1210 : : // Put PC of start code block into REG_LOCAL_1
1211 : 2553 : ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
1212 : :
1213 : : // Wrap everything in an nlr context
1214 : 2553 : emit_native_label_assign(emit, nlr_label);
1215 : 2553 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1216 : 2553 : emit_call(emit, MP_F_NLR_PUSH);
1217 : : #if N_NLR_SETJMP
1218 : : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1219 : : emit_call(emit, MP_F_SETJMP);
1220 : : #endif
1221 : 2553 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
1222 : :
1223 : : // Clear PC of current code block, and jump there to resume execution
1224 : 2553 : ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1225 : 2553 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
1226 : 2553 : ASM_JUMP_REG(emit->as, REG_LOCAL_1);
1227 : :
1228 : : // Global exception handler: check for valid exception handler
1229 : 2553 : emit_native_label_assign(emit, global_except_label);
1230 : 2553 : ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
1231 : 2553 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
1232 : : }
1233 : :
1234 [ + + ]: 9789 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1235 : : // Restore old globals
1236 : 8758 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1237 : 8758 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1238 : : }
1239 : :
1240 [ + + ]: 9789 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1241 : : // Store return value in state[0]
1242 : 1031 : ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
1243 : 1031 : ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
1244 : :
1245 : : // Load return kind
1246 : 1031 : ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
1247 : :
1248 : 1031 : ASM_EXIT(emit->as);
1249 : : } else {
1250 : : // Re-raise exception out to caller
1251 : 8758 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1252 : 8758 : emit_call(emit, MP_F_NATIVE_RAISE);
1253 : : }
1254 : :
1255 : : // Label for start of function
1256 : 9789 : emit_native_label_assign(emit, start_label);
1257 : :
1258 [ + + ]: 9789 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1259 : 1031 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
1260 : 1031 : ASM_JUMP_REG(emit->as, REG_TEMP0);
1261 : 1031 : emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
1262 : :
1263 : : // This is the first entry of the generator
1264 : :
1265 : : // Check LOCAL_IDX_THROW_VAL for any injected value
1266 : 1031 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_THROW_VAL(emit));
1267 : 1031 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
1268 : 1031 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
1269 : 1031 : emit_call(emit, MP_F_NATIVE_RAISE);
1270 : : }
1271 : : }
1272 : 13889 : }
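: :
: : // In outline, the prologue emitted above for a non-generator function
: : // that has exception handlers behaves like this rough pseudo-C sketch
: : // (labels and names are illustrative, not the real emitted symbols):
: : //     old_globals = mp_native_swap_globals(fun->context->globals);
: : //     exc_handler_unwind = 0; exc_val = 0;
: : //     resume = &&start;
: : // nlr:
: : //     if (nlr_push(&nlr)) goto global_except;
: : //     exc_handler_pc = 0;
: : //     goto *resume;                      // first time: falls into start
: : // global_except:
: : //     resume = exc_handler_pc;
: : //     if (resume) goto nlr;              // re-arm nlr, run the handler
: : //     mp_native_swap_globals(old_globals);
: : //     mp_native_raise(exc_val);          // no handler: re-raise to caller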
1273 : :
1274 : 13873 : static void emit_native_global_exc_exit(emit_t *emit) {
1275 : : // Label for end of function
1276 : 13873 : emit_native_label_assign(emit, emit->exit_label);
1277 : :
1278 [ + + + + ]: 13873 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1279 : : // Get old globals
1280 [ + + ]: 9777 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1281 : 8754 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1282 : :
1283 [ + + ]: 8754 : if (emit->scope->exc_stack_size == 0) {
1284 : : // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
1285 : 6495 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
1286 : : }
1287 : :
1288 : : // Restore old globals
1289 : 8754 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1290 : : }
1291 : :
1292 : : // Pop the nlr context
1293 : 9777 : emit_call(emit, MP_F_NLR_POP);
1294 : :
1295 [ + + ]: 9777 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1296 [ + + ]: 8754 : if (emit->scope->exc_stack_size == 0) {
1297 : : // Destination label for above optimisation
1298 : 6495 : emit_native_label_assign(emit, emit->exit_label + 1);
1299 : : }
1300 : : }
1301 : :
1302 : : // Load return value
1303 : 9777 : ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
1304 : : }
1305 : :
1306 : 13873 : ASM_EXIT(emit->as);
1307 : 13873 : }
1308 : :
1309 : 2511 : static void emit_native_import_name(emit_t *emit, qstr qst) {
1310 : 2511 : DEBUG_printf("import_name %s\n", qstr_str(qst));
1311 : :
1312 : : // get arguments from stack: arg2 = fromlist, arg3 = level
1313 : : // If using viper types these arguments must be converted to proper objects, and
1314 : : // to accomplish this viper types are turned off for the emit_pre_pop_reg_reg call.
1315 : 2511 : bool orig_do_viper_types = emit->do_viper_types;
1316 : 2511 : emit->do_viper_types = false;
1317 : 2511 : vtype_kind_t vtype_fromlist;
1318 : 2511 : vtype_kind_t vtype_level;
1319 : 2511 : emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
1320 [ - + ]: 2511 : assert(vtype_fromlist == VTYPE_PYOBJ);
1321 [ - + ]: 2511 : assert(vtype_level == VTYPE_PYOBJ);
1322 : 2511 : emit->do_viper_types = orig_do_viper_types;
1323 : :
1324 : 2511 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
1325 : 2511 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1326 : 2511 : }
1327 : :
1328 : 1050 : static void emit_native_import_from(emit_t *emit, qstr qst) {
1329 : 1050 : DEBUG_printf("import_from %s\n", qstr_str(qst));
1330 : 1050 : emit_native_pre(emit);
1331 : 1050 : vtype_kind_t vtype_module;
1332 : 1050 : emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
1333 [ - + ]: 1050 : assert(vtype_module == VTYPE_PYOBJ);
1334 : 1050 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
1335 : 1050 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1336 : 1050 : }
1337 : :
1338 : 123 : static void emit_native_import_star(emit_t *emit) {
1339 : 123 : DEBUG_printf("import_star\n");
1340 : 123 : vtype_kind_t vtype_module;
1341 : 123 : emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
1342 [ - + ]: 123 : assert(vtype_module == VTYPE_PYOBJ);
1343 : 123 : emit_call(emit, MP_F_IMPORT_ALL);
1344 : 123 : emit_post(emit);
1345 : 123 : }
1346 : :
1347 : 3684 : static void emit_native_import(emit_t *emit, qstr qst, int kind) {
1348 [ + + ]: 3684 : if (kind == MP_EMIT_IMPORT_NAME) {
1349 : 2511 : emit_native_import_name(emit, qst);
1350 [ + + ]: 1173 : } else if (kind == MP_EMIT_IMPORT_FROM) {
1351 : 1050 : emit_native_import_from(emit, qst);
1352 : : } else {
1353 : 123 : emit_native_import_star(emit);
1354 : : }
1355 : 3684 : }
1356 : :
1357 : 25484 : static void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
1358 : 25484 : DEBUG_printf("load_const_tok(tok=%u)\n", tok);
1359 [ + + ]: 25484 : if (tok == MP_TOKEN_ELLIPSIS) {
1360 : 12 : emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
1361 : : } else {
1362 : 25472 : emit_native_pre(emit);
1363 [ + + ]: 25472 : if (tok == MP_TOKEN_KW_NONE) {
1364 : 23435 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
1365 : : } else {
1366 : 2037 : emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
1367 : : }
1368 : : }
1369 : 25484 : }
1370 : :
1371 : 36720 : static void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
1372 : 36720 : DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
1373 : 36720 : emit_native_pre(emit);
1374 : 36720 : emit_post_push_imm(emit, VTYPE_INT, arg);
1375 : 36720 : }
1376 : :
1377 : 25077 : static void emit_native_load_const_str(emit_t *emit, qstr qst) {
1378 : 25077 : emit_native_pre(emit);
1379 : : // TODO: Eventually we want to be able to work with raw pointers in viper to
1380 : : // do native array access. For now we just load them as any other object.
1381 : : /*
1382 : : if (emit->do_viper_types) {
1383 : : // load a pointer to the asciiz string?
1384 : : emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
1385 : : } else
1386 : : */
1387 : : {
1388 : 25077 : need_reg_single(emit, REG_TEMP0, 0);
1389 : 25077 : emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
1390 : 25077 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1391 : : }
1392 : 25077 : }
1393 : :
1394 : 11514 : static void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
1395 : 11514 : emit_native_pre(emit);
1396 : 11514 : need_reg_single(emit, REG_TEMP0, 0);
1397 : 11514 : emit_load_reg_with_object(emit, REG_TEMP0, obj);
1398 : 11514 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1399 : 11514 : }
1400 : :
1401 : 1035 : static void emit_native_load_null(emit_t *emit) {
1402 : 1035 : emit_native_pre(emit);
1403 : 1035 : emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
1404 : 1035 : }
1405 : :
1406 : 30543 : static void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1407 : 30543 : DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1408 : 30543 : vtype_kind_t vtype = emit->local_vtype[local_num];
1409 [ + + ]: 30543 : if (vtype == VTYPE_UNBOUND) {
1410 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
1411 : : }
1412 : 30543 : emit_native_pre(emit);
1413 [ + + + + : 30543 : if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
+ + ]
1414 : 16539 : emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
1415 : : } else {
1416 : 14004 : need_reg_single(emit, REG_TEMP0, 0);
1417 : 14004 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
1418 : 14004 : emit_post_push_reg(emit, vtype, REG_TEMP0);
1419 : : }
1420 : 30543 : }
1421 : :
1422 : 681 : static void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1423 : 681 : DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1424 : 681 : need_reg_single(emit, REG_RET, 0);
1425 : 681 : emit_native_load_fast(emit, qst, local_num);
1426 : 681 : vtype_kind_t vtype;
1427 : 681 : int reg_base = REG_RET;
1428 : 681 : emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1429 : 681 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1430 : : // closed over vars are always Python objects
1431 : 681 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1432 : 681 : }
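: :
: : // The fixed word offset 1 above relies on the layout of a closure cell
: : // (mp_obj_cell_t), which is roughly:
: : //     typedef struct { mp_obj_base_t base; mp_obj_t obj; } mp_obj_cell_t;
: : // i.e. the cell's value sits one machine word past the object header.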
1433 : :
1434 : 30180 : static void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1435 [ + + ]: 30180 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1436 : 29499 : emit_native_load_fast(emit, qst, local_num);
1437 : : } else {
1438 : 681 : emit_native_load_deref(emit, qst, local_num);
1439 : : }
1440 : 30180 : }
1441 : :
1442 : 84915 : static void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
1443 : 84915 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
1444 : 84915 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
1445 : 84915 : emit_native_pre(emit);
1446 [ + + ]: 84915 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1447 : : DEBUG_printf("load_name(%s)\n", qstr_str(qst));
1448 : : } else {
1449 : 18870 : DEBUG_printf("load_global(%s)\n", qstr_str(qst));
1450 [ + + ]: 18870 : if (emit->do_viper_types) {
1451 : : // check for builtin casting operators
1452 : 594 : int native_type = mp_native_type_from_qstr(qst);
1453 [ + + ]: 594 : if (native_type >= MP_NATIVE_TYPE_BOOL) {
1454 : 100 : emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
1455 : 100 : return;
1456 : : }
1457 : : }
1458 : : }
1459 : 84815 : emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
1460 : 84815 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1461 : : }
1462 : :
1463 : 10299 : static void emit_native_load_attr(emit_t *emit, qstr qst) {
1464 : : // depends on type of subject:
1465 : : // - integer, function, pointer to integers: error
1466 : : // - pointer to structure: get member, quite easy
1467 : : // - Python object: call mp_load_attr; the result needs a type in order to be converted
1468 : 10299 : vtype_kind_t vtype_base;
1469 : 10299 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1470 [ - + ]: 10299 : assert(vtype_base == VTYPE_PYOBJ);
1471 : 10299 : emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1472 : 10299 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1473 : 10299 : }
1474 : :
1475 : 16134 : static void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
1476 : 16134 : DEBUG_printf("load_method(%s, %d)\n", qstr_str(qst), is_super);
1477 [ + + ]: 16134 : if (is_super) {
1478 : 63 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
1479 : 63 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
1480 : 63 : emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
1481 : : } else {
1482 : 16071 : vtype_kind_t vtype_base;
1483 : 16071 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1484 [ - + ]: 16071 : assert(vtype_base == VTYPE_PYOBJ);
1485 : 16071 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1486 : 16071 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
1487 : : }
1488 : 16134 : }
1489 : :
1490 : 1560 : static void emit_native_load_build_class(emit_t *emit) {
1491 : 1560 : emit_native_pre(emit);
1492 : 1560 : emit_call(emit, MP_F_LOAD_BUILD_CLASS);
1493 : 1560 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1494 : 1560 : }
1495 : :
1496 : 4193 : static void emit_native_load_subscr(emit_t *emit) {
1497 : 4193 : DEBUG_printf("load_subscr\n");
1498 : : // need to compile: base[index]
1499 : :
1500 : : // pop: index, base
1501 : : // optimise case where index is an immediate
1502 : 4193 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1503 : :
1504 [ + + ]: 4193 : if (vtype_base == VTYPE_PYOBJ) {
1505 : : // standard Python subscr
1506 : : // TODO factor this implicit cast code with other uses of it
1507 : 4047 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1508 [ + + ]: 4047 : if (vtype_index == VTYPE_PYOBJ) {
1509 : 4041 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1510 : : } else {
1511 : 6 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1512 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
1513 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1514 : : }
1515 : 4047 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1516 : 4047 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
1517 : 4047 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1518 : : } else {
1519 : : // viper load
1520 : : // TODO The different machine architectures have very different
1521 : : // capabilities and requirements for loads, so probably best to
1522 : : // write a completely separate load-optimiser for each one.
1523 : 146 : stack_info_t *top = peek_stack(emit, 0);
1524 [ + + ]: 146 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1525 : : // index is an immediate
1526 : 70 : mp_int_t index_value = top->data.u_imm;
1527 : 70 : emit_pre_pop_discard(emit); // discard index
1528 : 70 : int reg_base = REG_ARG_1;
1529 : 70 : int reg_index = REG_ARG_2;
1530 : 70 : emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_index);
1531 : 70 : need_reg_single(emit, REG_RET, 0);
1532 [ + + + + ]: 70 : switch (vtype_base) {
1533 : 42 : case VTYPE_PTR8: {
1534 : : // pointer to 8-bit memory
1535 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1536 [ + + ]: 42 : if (index_value != 0) {
1537 : : // index is non-zero
1538 : : #if N_THUMB
1539 : : if (index_value > 0 && index_value < 32) {
1540 : : asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1541 : : break;
1542 : : }
1543 : : #elif N_RV32
1544 : : if (FIT_SIGNED(index_value, 12)) {
1545 : : asm_rv32_opcode_lbu(emit->as, REG_RET, reg_base, index_value);
1546 : : break;
1547 : : }
1548 : : #endif
1549 : 30 : need_reg_single(emit, reg_index, 0);
1550 : 30 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1551 : 30 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1552 : 30 : reg_base = reg_index;
1553 : : }
1554 : 42 : ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
1555 : 42 : break;
1556 : : }
1557 : 12 : case VTYPE_PTR16: {
1558 : : // pointer to 16-bit memory
1559 [ + + ]: 12 : if (index_value != 0) {
1560 : : // index is a non-zero immediate
1561 : : #if N_THUMB
1562 : : if (index_value > 0 && index_value < 32) {
1563 : : asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1564 : : break;
1565 : : }
1566 : : #elif N_RV32
1567 : : if (FIT_SIGNED(index_value, 11)) {
1568 : : asm_rv32_opcode_lhu(emit->as, REG_RET, reg_base, index_value << 1);
1569 : : break;
1570 : : }
1571 : : #endif
1572 : 6 : need_reg_single(emit, reg_index, 0);
1573 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1574 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1575 : 6 : reg_base = reg_index;
1576 : : }
1577 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
1578 : 12 : break;
1579 : : }
1580 : 12 : case VTYPE_PTR32: {
1581 : : // pointer to 32-bit memory
1582 [ + + ]: 12 : if (index_value != 0) {
1583 : : // index is a non-zero immediate
1584 : : #if N_THUMB
1585 : : if (index_value > 0 && index_value < 32) {
1586 : : asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1587 : : break;
1588 : : }
1589 : : #elif N_RV32
1590 : : if (FIT_SIGNED(index_value, 10)) {
1591 : : asm_rv32_opcode_lw(emit->as, REG_RET, reg_base, index_value << 2);
1592 : : break;
1593 : : }
1594 : : #endif
1595 : 6 : need_reg_single(emit, reg_index, 0);
1596 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1597 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1598 : 6 : reg_base = reg_index;
1599 : : }
1600 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
1601 : 12 : break;
1602 : : }
1603 : 4 : default:
1604 : 70 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1605 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1606 : : }
1607 : : } else {
1608 : : // index is not an immediate
1609 : 76 : vtype_kind_t vtype_index;
1610 : 76 : int reg_index = REG_ARG_2;
1611 : 76 : emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, REG_ARG_1);
1612 : 76 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1613 : 76 : need_reg_single(emit, REG_RET, 0);
1614 [ + + ]: 76 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1615 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1616 : : MP_ERROR_TEXT("can't load with '%q' index"), vtype_to_qstr(vtype_index));
1617 : : }
1618 [ + + + + ]: 76 : switch (vtype_base) {
1619 : 48 : case VTYPE_PTR8: {
1620 : : // pointer to 8-bit memory
1621 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1622 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1623 : 48 : ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
1624 : 48 : break;
1625 : : }
1626 : 12 : case VTYPE_PTR16: {
1627 : : // pointer to 16-bit memory
1628 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1629 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again: base + 2*index
1630 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
1631 : 12 : break;
1632 : : }
1633 : 12 : case VTYPE_PTR32: {
1634 : : // pointer to word-size memory
1635 : : #if N_RV32
1636 : : asm_rv32_opcode_slli(emit->as, REG_TEMP2, reg_index, 2);
1637 : : asm_rv32_opcode_cadd(emit->as, REG_ARG_1, REG_TEMP2);
1638 : : asm_rv32_opcode_lw(emit->as, REG_RET, REG_ARG_1, 0);
1639 : : break;
1640 : : #endif
1641 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1642 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again
1643 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again
1644 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again: base + 4*index
1645 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
1646 : 12 : break;
1647 : : }
1648 : 4 : default:
1649 : 76 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1650 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1651 : : }
1652 : : }
1653 : 146 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1654 : : }
1655 : 4193 : }
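: :
: : // For illustration, viper code such as:
: : //     p = ptr16(buf)
: : //     x = p[3]
: : // takes the immediate-index path above: the index is scaled by the
: : // element size (3 << 1), added to the base register, and a single
: : // 16-bit load into REG_RET is emitted (or one scaled-offset load on
: : // ports like Thumb and RV32 that can encode it directly).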
1656 : :
1657 : 7273 : static void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1658 : 7273 : vtype_kind_t vtype;
1659 [ + + + + : 7273 : if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
+ + ]
1660 : 1420 : emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
1661 : : } else {
1662 : 5853 : emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1663 : 5853 : emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
1664 : : }
1665 : 7273 : emit_post(emit);
1666 : :
1667 : : // check types
1668 [ + + ]: 7273 : if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1669 : : // first time this local is assigned, so give it a type of the object stored in it
1670 : 312 : emit->local_vtype[local_num] = vtype;
1671 [ + + ]: 6961 : } else if (emit->local_vtype[local_num] != vtype) {
1672 : : // type of local is not the same as object stored in it
1673 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1674 : : MP_ERROR_TEXT("local '%q' has type '%q' but source is '%q'"),
1675 : : qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
1676 : : }
1677 : 7273 : }
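: :
: : // For illustration, in a @micropython.viper function:
: : //     x = 1          # first store: local 'x' takes type 'int'
: : //     x = ptr8(buf)  # error: local 'x' has type 'int' but source is 'ptr8'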
1678 : :
1679 : 363 : static void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1680 : 363 : DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1681 : 363 : need_reg_single(emit, REG_TEMP0, 0);
1682 : 363 : need_reg_single(emit, REG_TEMP1, 0);
1683 : 363 : emit_native_load_fast(emit, qst, local_num);
1684 : 363 : vtype_kind_t vtype;
1685 : 363 : int reg_base = REG_TEMP0;
1686 : 363 : emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1687 : 363 : int reg_src = REG_TEMP1;
1688 : 363 : emit_pre_pop_reg_flexible(emit, &vtype, ®_src, reg_base, reg_base);
1689 : 363 : ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
1690 : 363 : emit_post(emit);
1691 : 363 : }
1692 : :
1693 : 7294 : static void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1694 [ + + ]: 7294 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1695 : 6931 : emit_native_store_fast(emit, qst, local_num);
1696 : : } else {
1697 : 363 : emit_native_store_deref(emit, qst, local_num);
1698 : : }
1699 : 7294 : }
1700 : :
1701 : 23964 : static void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
1702 : 23964 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
1703 : 23964 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
1704 [ + + ]: 23964 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1705 : : // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
1706 : 23052 : vtype_kind_t vtype;
1707 : 23052 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1708 [ - + ]: 23052 : assert(vtype == VTYPE_PYOBJ);
1709 : : } else {
1710 : 912 : vtype_kind_t vtype = peek_vtype(emit, 0);
1711 [ + + ]: 912 : if (vtype == VTYPE_PYOBJ) {
1712 : 906 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1713 : : } else {
1714 : 6 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1715 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
1716 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1717 : : }
1718 : : }
1719 : 23964 : emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
1720 : 23964 : emit_post(emit);
1721 : 23964 : }
1722 : :
1723 : 2544 : static void emit_native_store_attr(emit_t *emit, qstr qst) {
1724 : 2544 : vtype_kind_t vtype_base;
1725 : 2544 : vtype_kind_t vtype_val = peek_vtype(emit, 1);
1726 [ + + ]: 2544 : if (vtype_val == VTYPE_PYOBJ) {
1727 : 2532 : emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
1728 : : } else {
1729 : 12 : emit_access_stack(emit, 2, &vtype_val, REG_ARG_1); // arg1 = value
1730 : 12 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_val, REG_ARG_2); // arg2 = type
1731 : 12 : ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_RET); // arg3 = value (converted)
1732 : 12 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1733 : 12 : adjust_stack(emit, -1); // pop value
1734 : : }
1735 [ - + ]: 2544 : assert(vtype_base == VTYPE_PYOBJ);
1736 : 2544 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1737 : 2544 : emit_post(emit);
1738 : 2544 : }
1739 : :
1740 : 1529 : static void emit_native_store_subscr(emit_t *emit) {
1741 : 1529 : DEBUG_printf("store_subscr\n");
1742 : : // need to compile: base[index] = value
1743 : :
1744 : : // pop: index, base, value
1745 : : // optimise case where index is an immediate
1746 : 1529 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1747 : :
1748 [ + + ]: 1529 : if (vtype_base == VTYPE_PYOBJ) {
1749 : : // standard Python subscr
1750 : 1401 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1751 : 1401 : vtype_kind_t vtype_value = peek_vtype(emit, 2);
1752 [ + + ]: 1401 : if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
1753 : : // need to implicitly convert non-objects to objects
1754 : : // TODO do this properly
1755 : 6 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1756 : 6 : adjust_stack(emit, 3);
1757 : : }
1758 : 1401 : emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
1759 : 1401 : emit_call(emit, MP_F_OBJ_SUBSCR);
1760 : : } else {
1761 : : // viper store
1762 : : // TODO The different machine architectures have very different
1763 : : // capabilities and requirements for stores, so probably best to
1764 : : // write a completely separate store-optimiser for each one.
1765 : 128 : stack_info_t *top = peek_stack(emit, 0);
1766 [ + + ]: 128 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1767 : : // index is an immediate
1768 : 48 : mp_int_t index_value = top->data.u_imm;
1769 : 48 : emit_pre_pop_discard(emit); // discard index
1770 : 48 : vtype_kind_t vtype_value;
1771 : 48 : int reg_base = REG_ARG_1;
1772 : 48 : int reg_index = REG_ARG_2;
1773 : 48 : int reg_value = REG_ARG_3;
1774 : 48 : emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_value);
1775 : : #if N_X64 || N_X86
1776 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1777 : 48 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1778 : : #else
1779 : : emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, reg_base, reg_index);
1780 : : #endif
1781 [ + + ]: 48 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1782 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1783 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1784 : : }
1785 [ + + + + ]: 48 : switch (vtype_base) {
1786 : 12 : case VTYPE_PTR8: {
1787 : : // pointer to 8-bit memory
1788 : : // TODO optimise to use thumb strb r1, [r2, r3]
1789 [ + + ]: 12 : if (index_value != 0) {
1790 : : // index is non-zero
1791 : : #if N_THUMB
1792 : : if (index_value > 0 && index_value < 32) {
1793 : : asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1794 : : break;
1795 : : }
1796 : : #elif N_RV32
1797 : : if (FIT_SIGNED(index_value, 12)) {
1798 : : asm_rv32_opcode_sb(emit->as, reg_value, reg_base, index_value);
1799 : : break;
1800 : : }
1801 : : #endif
1802 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1803 : : #if N_ARM
1804 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1805 : : return;
1806 : : #endif
1807 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1808 : 6 : reg_base = reg_index;
1809 : : }
1810 : 12 : ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
1811 : 12 : break;
1812 : : }
1813 : 12 : case VTYPE_PTR16: {
1814 : : // pointer to 16-bit memory
1815 [ + + ]: 12 : if (index_value != 0) {
1816 : : // index is a non-zero immediate
1817 : : #if N_THUMB
1818 : : if (index_value > 0 && index_value < 32) {
1819 : : asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1820 : : break;
1821 : : }
1822 : : #elif N_RV32
1823 : : if (FIT_SIGNED(index_value, 11)) {
1824 : : asm_rv32_opcode_sh(emit->as, reg_value, reg_base, index_value << 1);
1825 : : break;
1826 : : }
1827 : : #endif
1828 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1829 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1830 : 6 : reg_base = reg_index;
1831 : : }
1832 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
1833 : 12 : break;
1834 : : }
1835 : 16 : case VTYPE_PTR32: {
1836 : : // pointer to 32-bit memory
1837 [ + + ]: 16 : if (index_value != 0) {
1838 : : // index is a non-zero immediate
1839 : : #if N_THUMB
1840 : : if (index_value > 0 && index_value < 32) {
1841 : : asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1842 : : break;
1843 : : }
1844 : : #elif N_RV32
1845 : : if (FIT_SIGNED(index_value, 10)) {
1846 : : asm_rv32_opcode_sw(emit->as, reg_value, reg_base, index_value << 2);
1847 : : break;
1848 : : }
1849 : : #elif N_ARM
1850 : : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1851 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1852 : : return;
1853 : : #endif
1854 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1855 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1856 : 6 : reg_base = reg_index;
1857 : : }
1858 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
1859 : 16 : break;
1860 : : }
1861 : 8 : default:
1862 : 48 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1863 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1864 : : }
1865 : : } else {
1866 : : // index is not an immediate
1867 : 80 : vtype_kind_t vtype_index, vtype_value;
1868 : 80 : int reg_index = REG_ARG_2;
1869 : 80 : int reg_value = REG_ARG_3;
1870 : 80 : emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, reg_value);
1871 : 80 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1872 [ + + ]: 80 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1873 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1874 : : MP_ERROR_TEXT("can't store with '%q' index"), vtype_to_qstr(vtype_index));
1875 : : }
1876 : : #if N_X64 || N_X86
1877 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1878 : 80 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1879 : : #else
1880 : : emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, REG_ARG_1, reg_index);
1881 : : #endif
1882 [ + + ]: 80 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1883 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1884 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1885 : : }
1886 [ + + + + ]: 80 : switch (vtype_base) {
1887 : 48 : case VTYPE_PTR8: {
1888 : : // pointer to 8-bit memory
1889 : : // TODO optimise to use thumb strb r1, [r2, r3]
1890 : : #if N_ARM
1891 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1892 : : break;
1893 : : #endif
1894 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1895 : 48 : ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
1896 : 48 : break;
1897 : : }
1898 : 12 : case VTYPE_PTR16: {
1899 : : // pointer to 16-bit memory
1900 : : #if N_ARM
1901 : : asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1902 : : break;
1903 : : #endif
1904 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1905 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again: base + 2*index
1906 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
1907 : 12 : break;
1908 : : }
1909 : 16 : case VTYPE_PTR32: {
1910 : : // pointer to 32-bit memory
1911 : : #if N_ARM
1912 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1913 : : break;
1914 : : #elif N_RV32
1915 : : asm_rv32_opcode_slli(emit->as, REG_TEMP2, reg_index, 2);
1916 : : asm_rv32_opcode_cadd(emit->as, REG_ARG_1, REG_TEMP2);
1917 : : asm_rv32_opcode_sw(emit->as, reg_value, REG_ARG_1, 0);
1918 : : break;
1919 : : #endif
1920 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1921 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again
1922 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again
1923 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again: base + 4*index
1924 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
1925 : 16 : break;
1926 : : }
1927 : 4 : default:
1928 : 80 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1929 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1930 : : }
1931 : : }
1932 : :
1933 : : }
1934 : 1529 : }
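: :
: : // For illustration, the viper store path mirrors the load path above:
: : //     p = ptr32(buf)
: : //     p[i] = x
: : // scales the index with repeated adds (base + 4*i) on ports without a
: : // scaled store, then emits a single 32-bit store of the value register.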
1935 : :
1936 : 342 : static void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1937 [ + - ]: 342 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1938 : : // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
1939 : : // to mark deleted vars but then every var would need to be checked on
1940 : : // each access. Very inefficient, so just set value to None to enable GC.
1941 : 342 : emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
1942 : 342 : emit_native_store_fast(emit, qst, local_num);
1943 : : } else {
1944 : : // TODO implement me!
1945 : 342 : }
1946 : 342 : }
1947 : :
1948 : 360 : static void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
1949 : 360 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
1950 : 360 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
1951 : 360 : emit_native_pre(emit);
1952 : 360 : emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
1953 : 360 : emit_post(emit);
1954 : 360 : }
1955 : :
1956 : 48 : static void emit_native_delete_attr(emit_t *emit, qstr qst) {
1957 : 48 : vtype_kind_t vtype_base;
1958 : 48 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1959 [ - + ]: 48 : assert(vtype_base == VTYPE_PYOBJ);
1960 : 48 : ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
1961 : 48 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1962 : 48 : emit_post(emit);
1963 : 48 : }
1964 : :
1965 : 189 : static void emit_native_delete_subscr(emit_t *emit) {
1966 : 189 : vtype_kind_t vtype_index, vtype_base;
1967 : 189 : emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
1968 [ - + ]: 189 : assert(vtype_index == VTYPE_PYOBJ);
1969 [ - + ]: 189 : assert(vtype_base == VTYPE_PYOBJ);
1970 : 189 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
1971 : 189 : }
1972 : :
1973 : 5911 : static void emit_native_subscr(emit_t *emit, int kind) {
1974 [ + + ]: 5911 : if (kind == MP_EMIT_SUBSCR_LOAD) {
1975 : 4193 : emit_native_load_subscr(emit);
1976 [ + + ]: 1718 : } else if (kind == MP_EMIT_SUBSCR_STORE) {
1977 : 1529 : emit_native_store_subscr(emit);
1978 : : } else {
1979 : 189 : emit_native_delete_subscr(emit);
1980 : : }
1981 : 5911 : }
1982 : :
1983 : 12891 : static void emit_native_attr(emit_t *emit, qstr qst, int kind) {
1984 [ + + ]: 12891 : if (kind == MP_EMIT_ATTR_LOAD) {
1985 : 10299 : emit_native_load_attr(emit, qst);
1986 [ + + ]: 2592 : } else if (kind == MP_EMIT_ATTR_STORE) {
1987 : 2544 : emit_native_store_attr(emit, qst);
1988 : : } else {
1989 : 48 : emit_native_delete_attr(emit, qst);
1990 : : }
1991 : 12891 : }
1992 : :
1993 : 6423 : static void emit_native_dup_top(emit_t *emit) {
1994 : 6423 : DEBUG_printf("dup_top\n");
1995 : 6423 : vtype_kind_t vtype;
1996 : 6423 : int reg = REG_TEMP0;
1997 : 6423 : emit_pre_pop_reg_flexible(emit, &vtype, ®, -1, -1);
1998 : 6423 : emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
1999 : 6423 : }
2000 : :
2001 : 450 : static void emit_native_dup_top_two(emit_t *emit) {
2002 : 450 : vtype_kind_t vtype0, vtype1;
2003 : 450 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
2004 : 450 : emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
2005 : 450 : }
2006 : :
2007 : 43123 : static void emit_native_pop_top(emit_t *emit) {
2008 : 43123 : DEBUG_printf("pop_top\n");
2009 : 43123 : emit_pre_pop_discard(emit);
2010 : 43123 : emit_post(emit);
2011 : 43123 : }
2012 : :
2013 : 804 : static void emit_native_rot_two(emit_t *emit) {
2014 : 804 : DEBUG_printf("rot_two\n");
2015 : 804 : vtype_kind_t vtype0, vtype1;
2016 : 804 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
2017 : 804 : emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
2018 : 804 : }
2019 : :
2020 : 336 : static void emit_native_rot_three(emit_t *emit) {
2021 : 336 : DEBUG_printf("rot_three\n");
2022 : 336 : vtype_kind_t vtype0, vtype1, vtype2;
2023 : 336 : emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
2024 : 336 : emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
2025 : 336 : }
2026 : :
2027 : 33890 : static void emit_native_jump(emit_t *emit, mp_uint_t label) {
2028 : 33890 : DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
2029 : 33890 : emit_native_pre(emit);
2030 : : // need to commit stack because we are jumping elsewhere
2031 : 33890 : need_stack_settled(emit);
2032 : 33890 : ASM_JUMP(emit->as, label);
2033 : 33890 : emit_post(emit);
2034 : 33890 : mp_asm_base_suppress_code(&emit->as->base);
2035 : 33890 : }
2036 : :
2037 : 11659 : static void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
2038 : 11659 : vtype_kind_t vtype = peek_vtype(emit, 0);
2039 [ + + ]: 11659 : if (vtype == VTYPE_PYOBJ) {
2040 : 11391 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2041 [ + + ]: 11391 : if (!pop) {
2042 : 267 : adjust_stack(emit, 1);
2043 : : }
2044 : 11391 : emit_call(emit, MP_F_OBJ_IS_TRUE);
2045 : : } else {
2046 : 268 : emit_pre_pop_reg(emit, &vtype, REG_RET);
2047 [ + + ]: 268 : if (!pop) {
2048 : 36 : adjust_stack(emit, 1);
2049 : : }
2050 [ + + ]: 268 : if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
2051 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2052 : : MP_ERROR_TEXT("can't implicitly convert '%q' to 'bool'"), vtype_to_qstr(vtype));
2053 : : }
2054 : : }
2055 : : // For the non-pop case we need to save the vtype so that emit_native_adjust_stack_size
2056 : : // can use it. This is a bit of a hack.
2057 [ + + ]: 11659 : if (!pop) {
2058 : 303 : emit->saved_stack_vtype = vtype;
2059 : : }
2060 : : // need to commit stack because we may jump elsewhere
2061 : 11659 : need_stack_settled(emit);
2062 : : // Emit the jump
2063 [ + + ]: 11659 : if (cond) {
2064 [ + + ]: 3009 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2065 : : } else {
2066 [ + + ]: 8650 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2067 : : }
2068 [ + + ]: 11659 : if (!pop) {
2069 : 303 : adjust_stack(emit, -1);
2070 : : }
2071 : 11659 : emit_post(emit);
2072 : 11659 : }
2073 : :
2074 : 11356 : static void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
2075 : 11356 : DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
2076 : 11356 : emit_native_jump_helper(emit, cond, label, true);
2077 : 11356 : }
2078 : :
2079 : 303 : static void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
2080 : 303 : DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
2081 : 303 : emit_native_jump_helper(emit, cond, label, false);
2082 : 303 : }
2083 : :
2084 : 17966 : static void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
2085 [ + + ]: 17966 : if (except_depth > 0) {
2086 : 357 : exc_stack_entry_t *first_finally = NULL;
2087 : 357 : exc_stack_entry_t *prev_finally = NULL;
2088 : 357 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
2089 [ + + ]: 1050 : for (; except_depth > 0; --except_depth, --e) {
2090 [ + + + + ]: 693 : if (e->is_finally && e->is_active) {
2091 : : // Found an active finally handler
2092 [ + + ]: 315 : if (first_finally == NULL) {
2093 : 267 : first_finally = e;
2094 : : }
2095 [ + + ]: 315 : if (prev_finally != NULL) {
2096 : : // Mark prev finally as needed to unwind a jump
2097 : 48 : prev_finally->unwind_label = e->label;
2098 : : }
2099 : : prev_finally = e;
2100 : : }
2101 : : }
2102 [ + + ]: 357 : if (prev_finally == NULL) {
2103 : : // No finally, handle the jump ourselves
2104 : : // First, restore the exception handler address for the jump
2105 [ + - ]: 90 : if (e < emit->exc_stack) {
2106 : 90 : ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
2107 : : } else {
2108 : 0 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
2109 : : }
2110 : 90 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
2111 : : } else {
2112 : : // Last finally should do our jump for us
2113 : : // Mark finally as needing to decide the type of jump
2114 : 267 : prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
2115 : 267 : ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
2116 : 267 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
2117 : : // Cancel any active exception (see also emit_native_pop_except_jump)
2118 : 267 : ASM_MOV_REG_IMM(emit->as, REG_RET, (mp_uint_t)MP_OBJ_NULL);
2119 : 267 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
2120 : : // Jump to the innermost active finally
2121 : 267 : label = first_finally->label;
2122 : : }
2123 : : }
2124 : 17966 : emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
2125 : 17966 : }
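: :
: : // For illustration, a break that unwinds through nested finallys:
: : //     while cond:
: : //         try:
: : //             try:
: : //                 break      # unwind jump with except_depth == 2
: : //             finally:
: : //                 f1()
: : //         finally:
: : //             f2()
: : // jumps to the innermost finally (f1), whose unwind_label chains on to
: : // the outer one (f2), which then performs the final jump out of the loop.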
2126 : :
2127 : 231 : static void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
2128 : : // the context manager is on the top of the stack
2129 : : // stack: (..., ctx_mgr)
2130 : :
2131 : : // get __exit__ method
2132 : 231 : vtype_kind_t vtype;
2133 : 231 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
2134 [ - + ]: 231 : assert(vtype == VTYPE_PYOBJ);
2135 : 231 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2136 : 231 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
2137 : : // stack: (..., ctx_mgr, __exit__, self)
2138 : :
2139 : 231 : emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
2140 : 231 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
2141 : 231 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
2142 : 231 : emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
2143 : 231 : emit_post_push_reg(emit, vtype, REG_ARG_3); // self
2144 : : // stack: (..., __exit__, self)
2145 : : // REG_ARG_1=ctx_mgr
2146 : :
2147 : : // get __enter__ method
2148 : 231 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2149 : 231 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
2150 : : // stack: (..., __exit__, self, __enter__, self)
2151 : :
2152 : : // call __enter__ method
2153 : 231 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
2154 : 231 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
2155 : 231 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
2156 : : // stack: (..., __exit__, self, as_value)
2157 : :
2158 : : // need to commit stack because we may jump elsewhere
2159 : 231 : need_stack_settled(emit);
2160 : 231 : emit_native_push_exc_stack(emit, label, true);
2161 : :
2162 : 231 : emit_native_dup_top(emit);
2163 : : // stack: (..., __exit__, self, as_value, as_value)
2164 : 231 : }
2165 : :
2166 : 6201 : static void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
2167 : 6201 : DEBUG_printf("setup_block(%d, %d)\n", (int)label, kind);
2168 [ + + ]: 6201 : if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
2169 : 231 : emit_native_setup_with(emit, label);
2170 : : } else {
2171 : : // Set up except and finally
2172 : 5970 : emit_native_pre(emit);
2173 : 5970 : need_stack_settled(emit);
2174 : 5970 : emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
2175 : 5970 : emit_post(emit);
2176 : : }
2177 : 6201 : }
2178 : :
2179 : 231 : static void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
2180 : : // Note: 3 labels are reserved for this function, starting at *emit->label_slot
2181 : :
2182 : : // stack: (..., __exit__, self, as_value)
2183 : 231 : emit_native_pre(emit);
2184 : 231 : emit_native_leave_exc_stack(emit, false);
2185 : 231 : adjust_stack(emit, -1);
2186 : : // stack: (..., __exit__, self)
2187 : :
2188 : : // Label for case where __exit__ is called from an unwind jump
2189 : 231 : emit_native_label_assign(emit, *emit->label_slot + 2);
2190 : :
2191 : : // call __exit__
2192 : 231 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2193 : 231 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2194 : 231 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2195 : 231 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2196 : 231 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2197 : :
2198 : : // Replace exc with MP_OBJ_NULL and finish
2199 : 231 : emit_native_jump(emit, *emit->label_slot);
2200 : :
2201 : : // nlr_catch
2202 : : // Don't use emit_native_label_assign because this isn't a real finally label
2203 : 231 : mp_asm_base_label_assign(&emit->as->base, label);
2204 : :
2205 : : // Leave with's exception handler
2206 : 231 : emit_native_leave_exc_stack(emit, true);
2207 : :
2208 : : // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
2209 : 231 : emit_native_adjust_stack_size(emit, 2);
2210 : : // stack: (..., __exit__, self)
2211 : :
2212 : 231 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc
2213 : :
2214 : : // Check if exc is MP_OBJ_NULL (i.e. zero) and jump to non-exc handler if it is
2215 : 231 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, *emit->label_slot + 2, false);
2216 : :
2217 : 231 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
2218 : 231 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
2219 : 231 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
2220 : 231 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
2221 : : // Stack: (..., __exit__, self, type(exc), exc, traceback)
2222 : :
2223 : : // call __exit__ method
2224 : 231 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2225 : 231 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2226 : : // Stack: (...)
2227 : :
2228 : : // If REG_RET is true then we need to replace the exception with MP_OBJ_NULL (swallow exception)
2229 : 231 : if (REG_ARG_1 != REG_RET) {
2230 : 231 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2231 : : }
2232 : 231 : emit_call(emit, MP_F_OBJ_IS_TRUE);
2233 : 231 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
2234 : :
2235 : : // Replace exception with MP_OBJ_NULL.
2236 : 231 : emit_native_label_assign(emit, *emit->label_slot);
2237 : 231 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2238 : 231 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2239 : :
2240 : : // end of with cleanup nlr_catch block
2241 : 231 : emit_native_label_assign(emit, *emit->label_slot + 1);
2242 : :
2243 : : // Exception is in nlr_buf.ret_val slot
2244 : 231 : adjust_stack(emit, 1);
2245 : 231 : }
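: :
: : // Taken together, setup_with and with_cleanup implement, roughly:
: : //     with cm as v:
: : //         body
: : // as the equivalent of (names illustrative; the traceback slot is
: : // always passed as None):
: : //     _exit = cm.__exit__            # bound method kept on the stack
: : //     v = cm.__enter__()
: : //     try:
: : //         body
: : //     except BaseException as e:
: : //         if not _exit(type(e), e, None): raise
: : //     else:
: : //         _exit(None, None, None)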
2246 : :
2247 : : #if MICROPY_PY_ASYNC_AWAIT
2248 : 42 : static void emit_native_async_with_setup_finally(emit_t *emit, mp_uint_t label_aexit_no_exc, mp_uint_t label_finally_block, mp_uint_t label_ret_unwind_jump) {
2249 : : // The async-with body has executed and no exception was raised; execution fell through to this point.
2250 : : // Stack: (..., ctx_mgr)
2251 : :
2252 : : // Insert a dummy value onto the stack so it has the same layout as expected by the code starting at label_aexit_no_exc
2253 : 42 : emit_native_adjust_stack_size(emit, 1); // push dummy value, it won't ever be used
2254 : 42 : emit_native_rot_two(emit);
2255 : 42 : emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE); // to tell end_finally there's no exception
2256 : 42 : emit_native_rot_two(emit);
2257 : : // Stack: (..., <dummy>, None, ctx_mgr)
2258 : 42 : emit_native_jump(emit, label_aexit_no_exc); // jump to code to call __aexit__
2259 : 42 : emit_native_adjust_stack_size(emit, -1);
2260 : :
2261 : : // Start of "finally" block which is entered via one of: an exception propagating out, a return, an unwind jump.
2262 : 42 : emit_native_label_assign(emit, label_finally_block);
2263 : :
2264 : : // Detect which case we have by whether the local exception slot holds an exception or not.
2265 : 42 : emit_pre_pop_discard(emit);
2266 : 42 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exception
2267 : 42 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1);
2268 : 42 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, label_ret_unwind_jump, false); // if not an exception then we have a return or an unwind jump.
2269 : 42 : }
2270 : : #endif
2271 : :
2272 : 6201 : static void emit_native_end_finally(emit_t *emit) {
2273 : : // logic:
2274 : : // exc = pop_stack
2275 : : // if exc == None: pass
2276 : : // else: raise exc
2277 : : // the check that exc is None is done in the MP_F_NATIVE_RAISE stub
2278 : 6201 : DEBUG_printf("end_finally\n");
2279 : :
2280 : 6201 : emit_pre_pop_discard(emit);
2281 : 6201 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
2282 : 6201 : emit_call(emit, MP_F_NATIVE_RAISE);
2283 : :
2284 : : // Get state for this finally and see if we need to unwind
2285 : 6201 : exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
2286 [ + + ]: 6201 : if (e->unwind_label != UNWIND_LABEL_UNUSED) {
2287 : 312 : ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
2288 : 312 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
2289 [ + + ]: 312 : if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
2290 : 264 : ASM_JUMP_REG(emit->as, REG_RET);
2291 : : } else {
2292 : 48 : emit_native_jump(emit, e->unwind_label);
2293 : : }
2294 : 312 : emit_native_label_assign(emit, *emit->label_slot);
2295 : : }
2296 : :
2297 : 6201 : emit_post(emit);
2298 : 6201 : }
2299 : :
2300 : 2356 : static void emit_native_get_iter(emit_t *emit, bool use_stack) {
2301 : : // perhaps the difficult one, as we want to rewrite for loops using native code;
2302 : : // in cases where we iterate over a Python object, can we use normal runtime calls?
2303 : :
2304 : 2356 : DEBUG_printf("get_iter(%d)\n", use_stack);
2305 : :
2306 : 2356 : vtype_kind_t vtype;
2307 : 2356 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2308 [ - + ]: 2356 : assert(vtype == VTYPE_PYOBJ);
2309 [ + + ]: 2356 : if (use_stack) {
2310 : 1326 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
2311 : 1326 : emit_call(emit, MP_F_NATIVE_GETITER);
2312 : : } else {
2313 : : // mp_getiter will allocate the iter_buf on the heap
2314 : 1030 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
2315 : 1030 : emit_call(emit, MP_F_NATIVE_GETITER);
2316 : 1030 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2317 : : }
2318 : 2356 : }
2319 : :
2320 : 1428 : static void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
2321 : 1428 : emit_native_pre(emit);
2322 : 1428 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
2323 : 1428 : adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
2324 : 1428 : emit_call(emit, MP_F_NATIVE_ITERNEXT);
2325 : : #if MICROPY_DEBUG_MP_OBJ_SENTINELS
2326 : : ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
2327 : : ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
2328 : : #else
2329 : 1428 : MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
2330 : 1428 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
2331 : : #endif
2332 : 1428 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2333 : 1428 : }
2334 : :
2335 : 1428 : static void emit_native_for_iter_end(emit_t *emit) {
2336 : : // adjust stack counter (we get here from for_iter ending, which popped the value for us)
2337 : 1428 : emit_native_pre(emit);
2338 : 1428 : adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
2339 : 1428 : emit_post(emit);
2340 : 1428 : }
2341 : :
2342 : 9789 : static void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
2343 [ + + ]: 9789 : if (within_exc_handler) {
2344 : : // Cancel any active exception so subsequent handlers don't see it
2345 : 4917 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2346 : 4917 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2347 : : } else {
2348 : 4872 : emit_native_leave_exc_stack(emit, false);
2349 : : }
2350 : 9789 : emit_native_jump(emit, label);
2351 : 9789 : }
2352 : :
2353 : 1304 : static void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
2354 : 1304 : vtype_kind_t vtype = peek_vtype(emit, 0);
2355 [ + + ]: 1304 : if (vtype == VTYPE_INT || vtype == VTYPE_UINT) {
2356 [ + + ]: 22 : if (op == MP_UNARY_OP_POSITIVE) {
2357 : : // No-operation, just leave the argument on the stack.
2358 [ + + ]: 16 : } else if (op == MP_UNARY_OP_NEGATIVE) {
2359 : 6 : int reg = REG_RET;
2360 : 6 : emit_pre_pop_reg_flexible(emit, &vtype, ®, reg, reg);
2361 : 6 : ASM_NEG_REG(emit->as, reg);
2362 : 6 : emit_post_push_reg(emit, vtype, reg);
2363 [ + + ]: 10 : } else if (op == MP_UNARY_OP_INVERT) {
2364 : : #ifdef ASM_NOT_REG
2365 : 6 : int reg = REG_RET;
2366 : 6 : emit_pre_pop_reg_flexible(emit, &vtype, ®, reg, reg);
2367 : 6 : ASM_NOT_REG(emit->as, reg);
2368 : : #else
2369 : : int reg = REG_RET;
2370 : : emit_pre_pop_reg_flexible(emit, &vtype, ®, REG_ARG_1, reg);
2371 : : ASM_MOV_REG_IMM(emit->as, REG_ARG_1, -1);
2372 : : ASM_XOR_REG_REG(emit->as, reg, REG_ARG_1);
2373 : : #endif
2374 : 6 : emit_post_push_reg(emit, vtype, reg);
2375 : : } else {
2376 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2377                 :             :                 MP_ERROR_TEXT("'not' not implemented"));
2378 : : }
2379 [ + + ]: 1282 : } else if (vtype == VTYPE_PYOBJ) {
2380 : 1278 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
2381 : 1278 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
2382 : 1278 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2383 : : } else {
2384 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2385 : : MP_ERROR_TEXT("can't do unary op of '%q'"), vtype_to_qstr(vtype));
2386 : : }
2387 : 1304 : }
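                     :             :
                     :             :     // The fallback INVERT path above uses the two's-complement identity
                     :             :     // ~x == x ^ -1, so targets without a native NOT instruction can still
                     :             :     // invert via XOR; for example:
                     :             :     //
                     :             :     //     int32_t x = 5;
                     :             :     //     assert((x ^ -1) == ~x);  // both evaluate to -6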
2388 : :
2389 : 19445 : static void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
2390 : 19445 : DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
2391 : 19445 : vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
2392 : 19445 : vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
2393 [ + + ]: 19445 : if ((vtype_lhs == VTYPE_INT || vtype_lhs == VTYPE_UINT)
2394 [ + + ]: 878 : && (vtype_rhs == VTYPE_INT || vtype_rhs == VTYPE_UINT)) {
2395 : : // for integers, inplace and normal ops are equivalent, so use just normal ops
2396 [ + + ]: 874 : if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
2397 : 186 : op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
2398 : : }
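                     :             :         // This folding relies on mp_binary_op_t laying out MP_BINARY_OP_OR..POWER
                     :             :         // and MP_BINARY_OP_INPLACE_OR..INPLACE_POWER in the same relative order,
                     :             :         // so e.g. MP_BINARY_OP_INPLACE_ADD maps onto MP_BINARY_OP_ADD by a
                     :             :         // constant offset.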
2399 : :
2400 : : #if N_X64 || N_X86
2401                 :             :         // special cases for x86/x64 shifts: the shift count must live in ECX/RCX
2402 [ + + ]: 874 : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2403 : : #if N_X64
2404 : 96 : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
2405 : : #else
2406 : : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
2407 : : #endif
2408 [ + + ]: 96 : if (op == MP_BINARY_OP_LSHIFT) {
2409 : 48 : ASM_LSL_REG(emit->as, REG_RET);
2410 : : } else {
2411 [ + + ]: 48 : if (vtype_lhs == VTYPE_UINT) {
2412 : 6 : ASM_LSR_REG(emit->as, REG_RET);
2413 : : } else {
2414 : 42 : ASM_ASR_REG(emit->as, REG_RET);
2415 : : }
2416 : : }
2417 : 96 : emit_post_push_reg(emit, vtype_lhs, REG_RET);
2418 : 212 : return;
2419 : : }
2420 : : #endif
2421 : :
2422                 :             :         // special cases for floor-divide and modulo because we dispatch to helper functions
2423 [ + + ]: 778 : if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
2424 : 20 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
2425 [ + + ]: 20 : if (vtype_lhs != VTYPE_INT) {
2426 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2427 : : MP_ERROR_TEXT("div/mod not implemented for uint"), mp_binary_op_method_name[op]);
2428 : : }
2429 [ + + ]: 20 : if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
2430 : 10 : emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
2431 : : } else {
2432 : 10 : emit_call(emit, MP_F_SMALL_INT_MODULO);
2433 : : }
2434 : 20 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
2435 : 20 : return;
2436 : : }
2437 : :
2438 : 758 : int reg_rhs = REG_ARG_3;
2439 : 758 : emit_pre_pop_reg_flexible(emit, &vtype_rhs, ®_rhs, REG_RET, REG_ARG_2);
2440 : 758 : emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
2441 : :
2442 : : #if !(N_X64 || N_X86)
2443 : : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2444 : : if (op == MP_BINARY_OP_LSHIFT) {
2445 : : ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2446 : : } else {
2447 : : if (vtype_lhs == VTYPE_UINT) {
2448 : : ASM_LSR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2449 : : } else {
2450 : : ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2451 : : }
2452 : : }
2453 : : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2454 : : return;
2455 : : }
2456 : : #endif
2457 : :
2458 [ + + + + + + + + ]:        758 :         if (op == MP_BINARY_OP_OR) {
2459 : 60 : ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2460 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2461 : : } else if (op == MP_BINARY_OP_XOR) {
2462 : 24 : ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2463 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2464 : : } else if (op == MP_BINARY_OP_AND) {
2465 : 60 : ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2466 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2467 : : } else if (op == MP_BINARY_OP_ADD) {
2468 : 294 : ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2469 : 294 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2470 : : } else if (op == MP_BINARY_OP_SUBTRACT) {
2471 : 24 : ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2472 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2473 : : } else if (op == MP_BINARY_OP_MULTIPLY) {
2474 : 24 : ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2475 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2476 : : } else if (op == MP_BINARY_OP_LESS
2477 : : || op == MP_BINARY_OP_MORE
2478 : : || op == MP_BINARY_OP_EQUAL
2479 : : || op == MP_BINARY_OP_LESS_EQUAL
2480 : : || op == MP_BINARY_OP_MORE_EQUAL
2481 : : || op == MP_BINARY_OP_NOT_EQUAL) {
2482 : : // comparison ops
2483 : :
2484 [ + + ]: 268 : if (vtype_lhs != vtype_rhs) {
2485 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("comparison of int and uint"));
2486 : : }
2487 : :
2488 [ + + ]: 268 : size_t op_idx = op - MP_BINARY_OP_LESS + (vtype_lhs == VTYPE_UINT ? 0 : 6);
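                     :             :         // op_idx selects into the 6+6 condition-code tables below: entries 0-5
                     :             :         // are the unsigned codes and 6-11 the signed ones, in the enum order
                     :             :         // LESS, MORE, EQUAL, LESS_EQUAL, MORE_EQUAL, NOT_EQUAL.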
2489 : :
2490 : 268 : need_reg_single(emit, REG_RET, 0);
2491 : : #if N_X64
2492 : 268 : asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
2493 : 268 : asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
2494 : 268 : static byte ops[6 + 6] = {
2495 : : // unsigned
2496 : : ASM_X64_CC_JB,
2497 : : ASM_X64_CC_JA,
2498 : : ASM_X64_CC_JE,
2499 : : ASM_X64_CC_JBE,
2500 : : ASM_X64_CC_JAE,
2501 : : ASM_X64_CC_JNE,
2502 : : // signed
2503 : : ASM_X64_CC_JL,
2504 : : ASM_X64_CC_JG,
2505 : : ASM_X64_CC_JE,
2506 : : ASM_X64_CC_JLE,
2507 : : ASM_X64_CC_JGE,
2508 : : ASM_X64_CC_JNE,
2509 : : };
2510 : 268 : asm_x64_setcc_r8(emit->as, ops[op_idx], REG_RET);
2511 : : #elif N_X86
2512 : : asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
2513 : : asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
2514 : : static byte ops[6 + 6] = {
2515 : : // unsigned
2516 : : ASM_X86_CC_JB,
2517 : : ASM_X86_CC_JA,
2518 : : ASM_X86_CC_JE,
2519 : : ASM_X86_CC_JBE,
2520 : : ASM_X86_CC_JAE,
2521 : : ASM_X86_CC_JNE,
2522 : : // signed
2523 : : ASM_X86_CC_JL,
2524 : : ASM_X86_CC_JG,
2525 : : ASM_X86_CC_JE,
2526 : : ASM_X86_CC_JLE,
2527 : : ASM_X86_CC_JGE,
2528 : : ASM_X86_CC_JNE,
2529 : : };
2530 : : asm_x86_setcc_r8(emit->as, ops[op_idx], REG_RET);
2531 : : #elif N_THUMB
2532 : : asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
2533 : : if (asm_thumb_allow_armv7m(emit->as)) {
2534 : : static uint16_t ops[6 + 6] = {
2535 : : // unsigned
2536 : : ASM_THUMB_OP_ITE_CC,
2537 : : ASM_THUMB_OP_ITE_HI,
2538 : : ASM_THUMB_OP_ITE_EQ,
2539 : : ASM_THUMB_OP_ITE_LS,
2540 : : ASM_THUMB_OP_ITE_CS,
2541 : : ASM_THUMB_OP_ITE_NE,
2542 : : // signed
2543 : : ASM_THUMB_OP_ITE_LT,
2544 : : ASM_THUMB_OP_ITE_GT,
2545 : : ASM_THUMB_OP_ITE_EQ,
2546 : : ASM_THUMB_OP_ITE_LE,
2547 : : ASM_THUMB_OP_ITE_GE,
2548 : : ASM_THUMB_OP_ITE_NE,
2549 : : };
2550 : : asm_thumb_op16(emit->as, ops[op_idx]);
2551 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2552 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2553 : : } else {
2554 : : static uint16_t ops[6 + 6] = {
2555 : : // unsigned
2556 : : ASM_THUMB_CC_CC,
2557 : : ASM_THUMB_CC_HI,
2558 : : ASM_THUMB_CC_EQ,
2559 : : ASM_THUMB_CC_LS,
2560 : : ASM_THUMB_CC_CS,
2561 : : ASM_THUMB_CC_NE,
2562 : : // signed
2563 : : ASM_THUMB_CC_LT,
2564 : : ASM_THUMB_CC_GT,
2565 : : ASM_THUMB_CC_EQ,
2566 : : ASM_THUMB_CC_LE,
2567 : : ASM_THUMB_CC_GE,
2568 : : ASM_THUMB_CC_NE,
2569 : : };
2570 : : asm_thumb_bcc_rel9(emit->as, ops[op_idx], 6);
2571 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2572 : : asm_thumb_b_rel12(emit->as, 4);
2573 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2574 : : }
2575 : : #elif N_ARM
2576 : : asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
2577 : : static uint ccs[6 + 6] = {
2578 : : // unsigned
2579 : : ASM_ARM_CC_CC,
2580 : : ASM_ARM_CC_HI,
2581 : : ASM_ARM_CC_EQ,
2582 : : ASM_ARM_CC_LS,
2583 : : ASM_ARM_CC_CS,
2584 : : ASM_ARM_CC_NE,
2585 : : // signed
2586 : : ASM_ARM_CC_LT,
2587 : : ASM_ARM_CC_GT,
2588 : : ASM_ARM_CC_EQ,
2589 : : ASM_ARM_CC_LE,
2590 : : ASM_ARM_CC_GE,
2591 : : ASM_ARM_CC_NE,
2592 : : };
2593 : : asm_arm_setcc_reg(emit->as, REG_RET, ccs[op_idx]);
2594 : : #elif N_XTENSA || N_XTENSAWIN
2595 : : static uint8_t ccs[6 + 6] = {
2596 : : // unsigned
2597 : : ASM_XTENSA_CC_LTU,
2598 : : 0x80 | ASM_XTENSA_CC_LTU, // for GTU we'll swap args
2599 : : ASM_XTENSA_CC_EQ,
2600 : : 0x80 | ASM_XTENSA_CC_GEU, // for LEU we'll swap args
2601 : : ASM_XTENSA_CC_GEU,
2602 : : ASM_XTENSA_CC_NE,
2603 : : // signed
2604 : : ASM_XTENSA_CC_LT,
2605 : : 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
2606 : : ASM_XTENSA_CC_EQ,
2607 : : 0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
2608 : : ASM_XTENSA_CC_GE,
2609 : : ASM_XTENSA_CC_NE,
2610 : : };
2611 : : uint8_t cc = ccs[op_idx];
2612 : : if ((cc & 0x80) == 0) {
2613 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
2614 : : } else {
2615 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
2616 : : }
2617 : : #elif N_RV32
2618 : : (void)op_idx;
2619 : : switch (op) {
2620 : : case MP_BINARY_OP_LESS:
2621 : : asm_rv32_meta_comparison_lt(emit->as, REG_ARG_2, reg_rhs, REG_RET, vtype_lhs == VTYPE_UINT);
2622 : : break;
2623 : :
2624 : : case MP_BINARY_OP_MORE:
2625 : : asm_rv32_meta_comparison_lt(emit->as, reg_rhs, REG_ARG_2, REG_RET, vtype_lhs == VTYPE_UINT);
2626 : : break;
2627 : :
2628 : : case MP_BINARY_OP_EQUAL:
2629 : : asm_rv32_meta_comparison_eq(emit->as, REG_ARG_2, reg_rhs, REG_RET);
2630 : : break;
2631 : :
2632 : : case MP_BINARY_OP_LESS_EQUAL:
2633 : : asm_rv32_meta_comparison_le(emit->as, REG_ARG_2, reg_rhs, REG_RET, vtype_lhs == VTYPE_UINT);
2634 : : break;
2635 : :
2636 : : case MP_BINARY_OP_MORE_EQUAL:
2637 : : asm_rv32_meta_comparison_le(emit->as, reg_rhs, REG_ARG_2, REG_RET, vtype_lhs == VTYPE_UINT);
2638 : : break;
2639 : :
2640 : : case MP_BINARY_OP_NOT_EQUAL:
2641 : : asm_rv32_meta_comparison_ne(emit->as, reg_rhs, REG_ARG_2, REG_RET);
2642 : : break;
2643 : :
2644 : : default:
2645 : : break;
2646 : : }
2647 : : #elif N_DEBUG
2648 : : asm_debug_setcc_reg_reg_reg(emit->as, op_idx, REG_RET, REG_ARG_2, reg_rhs);
2649 : : #else
2650 : : #error not implemented
2651 : : #endif
2652 : 268 : emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
2653 : : } else {
2654 : : // TODO other ops not yet implemented
2655 : 4 : adjust_stack(emit, 1);
2656 : 758 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2657 : : MP_ERROR_TEXT("binary op %q not implemented"), mp_binary_op_method_name[op]);
2658 : : }
2659 [ + + + - ]: 37138 : } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
2660 : 18567 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
2661 : 18567 : bool invert = false;
2662 [ + + ]: 18567 : if (op == MP_BINARY_OP_NOT_IN) {
2663 : : invert = true;
2664 : : op = MP_BINARY_OP_IN;
2665 [ + + ]: 18411 : } else if (op == MP_BINARY_OP_IS_NOT) {
2666 : 666 : invert = true;
2667 : 666 : op = MP_BINARY_OP_IS;
2668 : : }
2669 : 18567 : emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
2670 [ + + ]: 18567 : if (invert) {
2671 : 822 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
2672 : 822 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
2673 : : }
2674 : 18567 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2675 : : } else {
2676 : 4 : adjust_stack(emit, -1);
2677 : 19329 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2678 : : MP_ERROR_TEXT("can't do binary op between '%q' and '%q'"),
2679 : : vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
2680 : : }
2681 : : }
2682 : :
2683 : : #if MICROPY_PY_BUILTINS_SLICE
2684 : : static void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
2685 : : #endif
2686 : :
2687 : 7695 : static void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
2688                 :             :     // for viper: call the runtime with the types of the args;
2689                 :             :     // if wrapped in bytearray or similar, it allocates memory and fills it
2690 : 7695 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
2691 : 7695 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
2692 : 7695 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
2693 : 7695 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
2694 : : #if MICROPY_PY_BUILTINS_SLICE
2695 [ + + ]: 7695 : if (kind == MP_EMIT_BUILD_SLICE) {
2696 : 924 : emit_native_build_slice(emit, n_args);
2697 : 924 : return;
2698 : : }
2699 : : #endif
2700 : 6771 : emit_native_pre(emit);
2701 [ + + ]: 6771 : if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
2702 : 5628 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2703 : : }
2704 : 6771 : emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
2705 : 6771 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
2706 : : }
2707 : :
2708 : 2196 : static void emit_native_store_map(emit_t *emit) {
2709 : 2196 : vtype_kind_t vtype_key, vtype_value, vtype_map;
2710 : 2196 : emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
2711 [ - + ]: 2196 : assert(vtype_key == VTYPE_PYOBJ);
2712 [ - + ]: 2196 : assert(vtype_value == VTYPE_PYOBJ);
2713 [ - + ]: 2196 : assert(vtype_map == VTYPE_PYOBJ);
2714 : 2196 : emit_call(emit, MP_F_STORE_MAP);
2715 : 2196 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
2716 : 2196 : }
2717 : :
2718 : : #if MICROPY_PY_BUILTINS_SLICE
2719 : 924 : static void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
2720 : 924 : DEBUG_printf("build_slice %d\n", n_args);
2721 [ + + ]: 924 : if (n_args == 2) {
2722 : 753 : vtype_kind_t vtype_start, vtype_stop;
2723 : 753 : emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
2724 [ - + ]: 753 : assert(vtype_start == VTYPE_PYOBJ);
2725 [ - + ]: 753 : assert(vtype_stop == VTYPE_PYOBJ);
2726 : 753 : emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
2727 : : } else {
2728 [ - + ]: 171 : assert(n_args == 3);
2729 : 171 : vtype_kind_t vtype_start, vtype_stop, vtype_step;
2730 : 171 : emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
2731 [ - + ]: 171 : assert(vtype_start == VTYPE_PYOBJ);
2732 [ - + ]: 171 : assert(vtype_stop == VTYPE_PYOBJ);
2733 [ - + ]: 171 : assert(vtype_step == VTYPE_PYOBJ);
2734 : : }
2735 : 924 : emit_call(emit, MP_F_NEW_SLICE);
2736 : 924 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2737 : 924 : }
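                     :             :
                     :             : // For example, `x[1:5]` reaches this function with n_args == 2 (the step
                     :             : // defaults to the const none object loaded above), while `x[1:10:2]`
                     :             : // arrives with n_args == 3.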
2738 : : #endif
2739 : :
2740 : 189 : static void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
2741 : 189 : mp_fun_kind_t f;
2742 [ + + ]: 189 : if (kind == SCOPE_LIST_COMP) {
2743 : 162 : vtype_kind_t vtype_item;
2744 : 162 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2745 [ - + ]: 162 : assert(vtype_item == VTYPE_PYOBJ);
2746 : 162 : f = MP_F_LIST_APPEND;
2747 : : #if MICROPY_PY_BUILTINS_SET
2748 [ + + ]: 27 : } else if (kind == SCOPE_SET_COMP) {
2749 : 3 : vtype_kind_t vtype_item;
2750 : 3 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2751 [ - + ]: 3 : assert(vtype_item == VTYPE_PYOBJ);
2752 : 3 : f = MP_F_STORE_SET;
2753 : : #endif
2754 : : } else {
2755 : : // SCOPE_DICT_COMP
2756 : 24 : vtype_kind_t vtype_key, vtype_value;
2757 : 24 : emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2758 [ - + ]: 24 : assert(vtype_key == VTYPE_PYOBJ);
2759 [ - + ]: 24 : assert(vtype_value == VTYPE_PYOBJ);
2760 : 24 : f = MP_F_STORE_MAP;
2761 : : }
2762 : 189 : vtype_kind_t vtype_collection;
2763 : 189 : emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2764 [ - + ]: 189 : assert(vtype_collection == VTYPE_PYOBJ);
2765 : 189 : emit_call(emit, f);
2766 : 189 : emit_post(emit);
2767 : 189 : }
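                     :             :
                     :             : // For example, `[f(x) for x in it]` takes the SCOPE_LIST_COMP path: each item
                     :             : // is appended via MP_F_LIST_APPEND to the result list, which is accessed in
                     :             : // place at collection_index on the value stack.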
2768 : :
2769 : 447 : static void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
2770 : 447 : DEBUG_printf("unpack_sequence %d\n", n_args);
2771 : 447 : vtype_kind_t vtype_base;
2772 : 447 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2773 [ - + ]: 447 : assert(vtype_base == VTYPE_PYOBJ);
2774 : 447 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
2775 : 447 : emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
2776 : 447 : }
2777 : :
2778 : 99 : static void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
2779 : 99 : DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
2780 : 99 : vtype_kind_t vtype_base;
2781 : 99 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2782 [ - + ]: 99 : assert(vtype_base == VTYPE_PYOBJ);
2783 : 99 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
2784                 :          99 :     emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
2785 : 99 : }
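                     :             :
                     :             : // For example, `a, *b, c = seq` gives n_left == 1 and n_right == 1, so the
                     :             : // packed immediate is 1 | (1 << 8) == 0x101 and n_left + n_right + 1 == 3
                     :             : // destination slots are reserved: one each for a and c, plus the list b.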
2786 : :
2787 : 9126 : static void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2788 : : // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
2789 : 9126 : emit_native_pre(emit);
2790 : 9126 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2791 : 9126 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2792 [ + + ]: 9126 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2793 : 8553 : need_reg_all(emit);
2794 : 8553 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2795 : : } else {
2796 : 573 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2);
2797 : 573 : need_reg_all(emit);
2798 : : }
2799 : 9126 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2800 : 9126 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_PROTO_FUN);
2801 : 9126 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2802 : 9126 : }
2803 : :
2804 : 249 : static void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2805 : : // make function
2806 : 249 : emit_native_pre(emit);
2807 : 249 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2808 : 249 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2809 [ + + ]: 249 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2810 : 246 : need_reg_all(emit);
2811 : 246 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2812 : : } else {
2813 : 3 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_closed_over);
2814 : 3 : adjust_stack(emit, 2 + n_closed_over);
2815 : 3 : need_reg_all(emit);
2816 : : }
2817 : 249 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2818 : 249 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_PROTO_FUN);
2819 : :
2820 : : // make closure
2821 : : #if REG_ARG_1 != REG_RET
2822 : 249 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2823 : : #endif
2824 : 249 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
2825 : 249 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2826 [ + + ]: 249 : if (n_pos_defaults != 0 || n_kw_defaults != 0) {
2827 : 3 : adjust_stack(emit, -2);
2828 : : }
2829 : 249 : ASM_CALL_IND(emit->as, MP_F_NEW_CLOSURE);
2830 : 249 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2831 : 249 : }
2832 : :
2833 : 45839 : static void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2834 : 45839 : DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);
2835 : :
2836 : : // TODO: in viper mode, call special runtime routine with type info for args,
2837 : : // and wanted type info for return, to remove need for boxing/unboxing
2838 : :
2839 : 45839 : emit_native_pre(emit);
2840 : 45839 : vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2841 [ + + ]: 45839 : if (vtype_fun == VTYPE_BUILTIN_CAST) {
2842 : : // casting operator
2843 [ - + ]: 96 : assert(n_positional == 1 && n_keyword == 0);
2844 [ - + ]: 96 : assert(!star_flags);
2845 : 96 : DEBUG_printf(" cast to %d\n", vtype_fun);
2846 : 96 : vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
2847 [ + + + ]: 96 : switch (peek_vtype(emit, 0)) {
2848 : 78 : case VTYPE_PYOBJ: {
2849 : 78 : vtype_kind_t vtype;
2850 : 78 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2851 : 78 : emit_pre_pop_discard(emit);
2852 : 78 : emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
2853 : 78 : emit_post_push_reg(emit, vtype_cast, REG_RET);
2854 : 78 : break;
2855 : : }
2856 : 14 : case VTYPE_BOOL:
2857 : : case VTYPE_INT:
2858 : : case VTYPE_UINT:
2859 : : case VTYPE_PTR:
2860 : : case VTYPE_PTR8:
2861 : : case VTYPE_PTR16:
2862 : : case VTYPE_PTR32:
2863 : : case VTYPE_PTR_NONE:
2864 : 14 : emit_fold_stack_top(emit, REG_ARG_1);
2865 : 14 : emit_post_top_set_vtype(emit, vtype_cast);
2866 : 14 : break;
2867 : : default:
2868 : : // this can happen when casting a cast: int(int)
2869 : 4 : mp_raise_NotImplementedError(MP_ERROR_TEXT("casting"));
2870 : : }
2871 : : } else {
2872 [ - + ]: 45743 : assert(vtype_fun == VTYPE_PYOBJ);
2873 [ + + ]: 45743 : if (star_flags) {
2874 : 348 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 2); // pointer to args
2875 : 348 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2876 : 348 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2877 : : } else {
2878 [ + + ]: 45395 : if (n_positional != 0 || n_keyword != 0) {
2879 : 41696 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
2880 : : }
2881 : 45391 : emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
2882 : 45391 : emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
2883 : 45391 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2884 : : }
2885 : : }
2886 : 45831 : }
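                     :             :
                     :             : // The VTYPE_BUILTIN_CAST path above handles viper casts, which parse as calls
                     :             : // but compile to conversions rather than real function calls; a hedged example
                     :             : // (viper Python, not C):
                     :             : //
                     :             : //     @micropython.viper
                     :             : //     def first_byte(buf) -> int:
                     :             : //         p = ptr8(buf)  # "call" of ptr8: Python object -> VTYPE_PTR8
                     :             : //         return p[0]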
2887 : :
2888 : 16134 : static void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2889 : 16134 : DEBUG_printf("call_method(%d, %d, %d)\n", n_positional, n_keyword, star_flags);
2890 [ + + ]: 16134 : if (star_flags) {
2891 : 120 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
2892 : 120 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2893 : 120 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2894 : : } else {
2895 : 16014 : emit_native_pre(emit);
2896 : 16014 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
2897 : 16014 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2898 : 16014 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2899 : : }
2900 : 16134 : }
2901 : :
2902 : 17588 : static void emit_native_return_value(emit_t *emit) {
2903 : 17588 : DEBUG_printf("return_value\n");
2904 : :
2905 [ + + ]: 17588 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
2906 : : // Save pointer to current stack position for caller to access return value
2907 : 1299 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2908 : 1299 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2909 : :
2910 : : // Put return type in return value slot
2911 : 1299 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
2912 : 1299 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
2913 : :
2914 : : // Do the unwinding jump to get to the return handler
2915 : 1299 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2916 : 1299 : return;
2917 : : }
2918 : :
2919 [ + + ]: 16289 : if (emit->do_viper_types) {
2920 : 1028 : vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
2921 [ + + ]: 1028 : if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2922 : 712 : emit_pre_pop_discard(emit);
2923 [ + + ]: 712 : if (return_vtype == VTYPE_PYOBJ) {
2924 : 504 : emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
2925 : : } else {
2926 : 208 : ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
2927 : : }
2928 : : } else {
2929 : 316 : vtype_kind_t vtype;
2930 [ + + ]: 512 : emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
2931 [ + + ]: 316 : if (vtype != return_vtype) {
2932 : 316 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2933 : : MP_ERROR_TEXT("return expected '%q' but got '%q'"),
2934 : : vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
2935 : : }
2936 : : }
2937 [ + + ]: 1028 : if (return_vtype != VTYPE_PYOBJ) {
2938 : 404 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
2939 : : #if REG_RET != REG_PARENT_RET
2940 : : ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
2941 : : #endif
2942 : : }
2943 : : } else {
2944 : 15261 : vtype_kind_t vtype;
2945 : 15261 : emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
2946 [ - + ]: 15261 : assert(vtype == VTYPE_PYOBJ);
2947 : : }
2948 [ + + + + ]: 16289 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
2949 : : // Save return value for the global exception handler to use
2950 : 11322 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
2951 : : }
2952 : 16289 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2953 : : }
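                     :             :
                     :             : // For example, a viper function annotated `-> int` has return_vtype ==
                     :             : // VTYPE_INT: the native value is placed in REG_ARG_1 and boxed by the
                     :             : // MP_F_CONVERT_NATIVE_TO_OBJ call above before being handed back to the
                     :             : // caller in REG_PARENT_RET.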
2954 : :
2955 : 3349 : static void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
2956 : 3349 : DEBUG_printf("raise_varargs(%d)\n", n_args);
2957 : 3349 : (void)n_args;
2958 [ - + ]: 3349 : assert(n_args == 1);
2959 : 3349 : vtype_kind_t vtype_exc;
2960 : 3349 : emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
2961 [ + + ]: 3349 : if (vtype_exc != VTYPE_PYOBJ) {
2962 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("must raise an object"));
2963 : : }
2964 : : // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
2965 : 3349 : emit_call(emit, MP_F_NATIVE_RAISE);
2966 : 3349 : mp_asm_base_suppress_code(&emit->as->base);
2967 : 3349 : }
2968 : :
2969 : 1472 : static void emit_native_yield(emit_t *emit, int kind) {
2970 : : // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot
2971 : :
2972 : 1472 : DEBUG_printf("yield(%d)\n", kind);
2973 : :
2974 [ + + ]: 1472 : if (emit->do_viper_types) {
2975 : 8 : mp_raise_NotImplementedError(MP_ERROR_TEXT("native yield"));
2976 : : }
2977 : 1464 : emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;
2978 : :
2979 : 1464 : need_stack_settled(emit);
2980 : :
2981 [ + + ]: 1464 : if (kind == MP_EMIT_YIELD_FROM) {
2982 : :
2983 : : // Top of yield-from loop, conceptually implementing:
2984 : : // for item in generator:
2985 : : // yield item
2986 : :
2987 : : // Jump to start of loop
2988 : 924 : emit_native_jump(emit, *emit->label_slot + 2);
2989 : :
2990 : : // Label for top of loop
2991 : 924 : emit_native_label_assign(emit, *emit->label_slot + 1);
2992 : : }
2993 : :
2994 : : // Save pointer to current stack position for caller to access yielded value
2995 : 1464 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2996 : 1464 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2997 : :
2998                 :             :     // Put the return kind (MP_VM_RETURN_YIELD) in the return-value slot
2999 : 1464 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
3000 : 1464 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
3001 : :
3002 : : // Save re-entry PC
3003 : 1464 : ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
3004 : 1464 : emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);
3005 : :
3006 : : // Jump to exit handler
3007 : 1464 : ASM_JUMP(emit->as, emit->exit_label);
3008 : :
3009 : : // Label re-entry point
3010 : 1464 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);
3011 : :
3012 : : // Re-open any active exception handler
3013 [ + + ]: 1464 : if (emit->exc_stack_size > 0) {
3014 : : // Find innermost active exception handler, to restore as current handler
3015 : 417 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
3016 [ + + ]: 513 : for (; e >= emit->exc_stack; --e) {
3017 [ + + ]: 453 : if (e->is_active) {
3018 : : // Found active handler, get its PC
3019 : 357 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
3020 : 357 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
3021 : 357 : break;
3022 : : }
3023 : : }
3024 : : }
3025 : :
3026 : 1464 : emit_native_adjust_stack_size(emit, 1); // send_value
3027 : :
3028 [ + + ]: 1464 : if (kind == MP_EMIT_YIELD_VALUE) {
3029 : : // Check LOCAL_IDX_THROW_VAL for any injected value
3030 : 540 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_THROW_VAL(emit));
3031 : 540 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
3032 : 540 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
3033 : 540 : emit_call(emit, MP_F_NATIVE_RAISE);
3034 : : } else {
3035 : : // Label loop entry
3036 : 924 : emit_native_label_assign(emit, *emit->label_slot + 2);
3037 : :
3038 : : // Get the next item from the delegate generator
3039 : 924 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_THROW_VAL(emit)); // throw_value
3040 : 924 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
3041 : 924 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
3042 : 924 : vtype_kind_t vtype;
3043 : 924 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
3044 : 924 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
3045 : 924 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
3046 : 924 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
3047 : 924 : emit_call(emit, MP_F_NATIVE_YIELD_FROM);
3048 : :
3049 : : // If returned non-zero then generator continues
3050 : 924 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
3051 : :
3052 : : // Pop exhausted gen, replace with ret_value
3053 : 924 : emit_native_adjust_stack_size(emit, 1); // ret_value
3054 : 924 : emit_fold_stack_top(emit, REG_ARG_1);
3055 : : }
3056 : 1464 : }
3057 : :
3058 : 4872 : static void emit_native_start_except_handler(emit_t *emit) {
3059 : : // Protected block has finished so leave the current exception handler
3060 : 4872 : emit_native_leave_exc_stack(emit, true);
3061 : :
3062 : : // Get and push nlr_buf.ret_val
3063 : 4872 : ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
3064 : 4872 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
3065 : 4872 : }
3066 : :
3067 : 4872 : static void emit_native_end_except_handler(emit_t *emit) {
3068 : 4872 : }
3069 : :
3070 : : const emit_method_table_t EXPORT_FUN(method_table) = {
3071 : : #if MICROPY_DYNAMIC_COMPILER
3072 : : EXPORT_FUN(new),
3073 : : EXPORT_FUN(free),
3074 : : #endif
3075 : :
3076 : : emit_native_start_pass,
3077 : : emit_native_end_pass,
3078 : : emit_native_adjust_stack_size,
3079 : : emit_native_set_source_line,
3080 : :
3081 : : {
3082 : : emit_native_load_local,
3083 : : emit_native_load_global,
3084 : : },
3085 : : {
3086 : : emit_native_store_local,
3087 : : emit_native_store_global,
3088 : : },
3089 : : {
3090 : : emit_native_delete_local,
3091 : : emit_native_delete_global,
3092 : : },
3093 : :
3094 : : emit_native_label_assign,
3095 : : emit_native_import,
3096 : : emit_native_load_const_tok,
3097 : : emit_native_load_const_small_int,
3098 : : emit_native_load_const_str,
3099 : : emit_native_load_const_obj,
3100 : : emit_native_load_null,
3101 : : emit_native_load_method,
3102 : : emit_native_load_build_class,
3103 : : emit_native_subscr,
3104 : : emit_native_attr,
3105 : : emit_native_dup_top,
3106 : : emit_native_dup_top_two,
3107 : : emit_native_pop_top,
3108 : : emit_native_rot_two,
3109 : : emit_native_rot_three,
3110 : : emit_native_jump,
3111 : : emit_native_pop_jump_if,
3112 : : emit_native_jump_if_or_pop,
3113 : : emit_native_unwind_jump,
3114 : : emit_native_setup_block,
3115 : : emit_native_with_cleanup,
3116 : : #if MICROPY_PY_ASYNC_AWAIT
3117 : : emit_native_async_with_setup_finally,
3118 : : #endif
3119 : : emit_native_end_finally,
3120 : : emit_native_get_iter,
3121 : : emit_native_for_iter,
3122 : : emit_native_for_iter_end,
3123 : : emit_native_pop_except_jump,
3124 : : emit_native_unary_op,
3125 : : emit_native_binary_op,
3126 : : emit_native_build,
3127 : : emit_native_store_map,
3128 : : emit_native_store_comp,
3129 : : emit_native_unpack_sequence,
3130 : : emit_native_unpack_ex,
3131 : : emit_native_make_function,
3132 : : emit_native_make_closure,
3133 : : emit_native_call_function,
3134 : : emit_native_call_method,
3135 : : emit_native_return_value,
3136 : : emit_native_raise_varargs,
3137 : : emit_native_yield,
3138 : :
3139 : : emit_native_start_except_handler,
3140 : : emit_native_end_except_handler,
3141 : : };
3142 : :
3143 : : #endif
|