/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

// Essentially normal Python has one type: Python objects.
// Viper has more than one type, and is a more complicated superset of Python.
// If you declare everything in Viper as a Python object (ie omit type decls) then
// it should in principle be exactly the same as Python native.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
// In practice we won't have a VM but rather do this in asm, which is actually very minimal.

// Because it breaks strict Python equivalence it should be a completely separate
// decorator. It breaks equivalence because overflow on integers wraps around.
// It shouldn't break equivalence if you don't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence because integers default to native integers,
// not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type

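// As a rough illustration of the above (hypothetical user code, not part of
// this file), a viper function with type declarations might look like:
//
//     @micropython.viper
//     def sum_bytes(buf: ptr8, n: int) -> int:
//         total = 0
//         for i in range(n):
//             total += buf[i]  # native loads and adds; ints wrap on overflow
//         return total
//
// Omitting the annotations would make buf, n and total plain Python objects again.
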
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "py/emit.h"
#include "py/nativeglue.h"
#include "py/objfun.h"
#include "py/objstr.h"

#if MICROPY_DEBUG_VERBOSE // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf DEBUG_printf
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif

// wrapper around everything in this file
#if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN || N_RV32 || N_DEBUG

// C stack layout for native functions:
//  0:                          nlr_buf_t [optional]
//                              return_value [optional word]
//                              exc_handler_unwind [optional word]
//  emit->code_state_start:     mp_code_state_native_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for native generator functions:
//  0=emit->stack_start:        nlr_buf_t
//                              return_value
//                              exc_handler_unwind [optional word]
//
//  Then REG_GENERATOR_STATE points to:
//  0=emit->code_state_start:   mp_code_state_native_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for viper functions:
//  0:                          nlr_buf_t [optional]
//                              return_value [optional word]
//                              exc_handler_unwind [optional word]
//  emit->code_state_start:     fun_obj, old_globals [optional]
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//                              (L0-L2 may be in regs instead)

// Native emitter needs to know the following sizes and offsets of C structs (on the target):
#if MICROPY_DYNAMIC_COMPILER
#define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK is enabled
#else
#define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
#endif
#define SIZEOF_CODE_STATE (sizeof(mp_code_state_native_t) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_native_t, state) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_native_t, fun_bc) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_native_t, ip) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_native_t, sp) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_N_STATE (offsetof(mp_code_state_native_t, n_state) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_CONTEXT (offsetof(mp_obj_fun_bc_t, context) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_CHILD_TABLE (offsetof(mp_obj_fun_bc_t, child_table) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
#define OFFSETOF_MODULE_CONTEXT_QSTR_TABLE (offsetof(mp_module_context_t, constants.qstr_table) / sizeof(uintptr_t))
#define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
#define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))

// If not already defined, set parent args to the same as the child call registers
#ifndef REG_PARENT_RET
#define REG_PARENT_RET REG_RET
#define REG_PARENT_ARG_1 REG_ARG_1
#define REG_PARENT_ARG_2 REG_ARG_2
#define REG_PARENT_ARG_3 REG_ARG_3
#define REG_PARENT_ARG_4 REG_ARG_4
#endif

// Word index of nlr_buf_t.ret_val
#define NLR_BUF_IDX_RET_VAL (1)

// Whether the viper function needs access to fun_obj
#define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
    || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))

// Whether the native/viper function needs to be wrapped in an exception handler
#define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
    || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))

// Whether a slot is needed to store LOCAL_IDX_EXC_HANDLER_UNWIND
#define NEED_EXC_HANDLER_UNWIND(emit) ((emit)->scope->exc_stack_size > 0)
#define NEED_THROW_VAL(emit) ((emit)->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)

// Whether registers can be used to store locals (only true if there are no
// exception handlers, because otherwise an nlr_jump will restore registers to
// their state at the start of the function and updates to locals will be lost)
#define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))

// Indices within the local C stack for various variables
#define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
#define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
#define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF + 1) // this needs a dedicated variable outside nlr_buf_t
#define LOCAL_IDX_THROW_VAL(emit) (SIZEOF_NLR_BUF + 2) // needs a dedicated variable outside nlr_buf_t, following inject_exc in py/vm.c
#define LOCAL_IDX_RET_VAL(emit) (SIZEOF_NLR_BUF) // needed when NEED_GLOBAL_EXC_HANDLER is true
#define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
#define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
#define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))

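// Worked example of the indices above (a sketch; concrete numbers depend on
// the target): suppose SIZEOF_NLR_BUF is 5 words. Then for a generator
// function (NEED_THROW_VAL true) emit_native_start_pass lays the slots out as
//
//     words 0-4: nlr_buf_t          (word 1 = LOCAL_IDX_EXC_VAL)
//     word  5:   LOCAL_IDX_RET_VAL
//     word  6:   LOCAL_IDX_EXC_HANDLER_UNWIND
//     word  7:   LOCAL_IDX_THROW_VAL
//     word  8:   emit->code_state_start
//
// which matches the +1/+2 offsets used in the macros above.
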
#if MICROPY_PERSISTENT_CODE_SAVE

// When building with the ability to save native code to .mpy files:
// - Qstrs are indirect via qstr_table, and REG_LOCAL_3 always points to qstr_table.
// - In a generator no registers are used to store locals, and REG_LOCAL_2 points to the generator state.
// - At most 2 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).

#define REG_GENERATOR_STATE (REG_LOCAL_2)
#define REG_QSTR_TABLE (REG_LOCAL_3)
#define MAX_REGS_FOR_LOCAL_VARS (2)

static const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2};

#else

// When building without the ability to save native code to .mpy files:
// - Qstr values are written directly into the machine code.
// - In a generator no registers are used to store locals, and REG_LOCAL_3 points to the generator state.
// - At most 3 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).

#define REG_GENERATOR_STATE (REG_LOCAL_3)
#define MAX_REGS_FOR_LOCAL_VARS (3)

static const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};

#endif

#define REG_LOCAL_LAST (reg_local_table[MAX_REGS_FOR_LOCAL_VARS - 1])

#define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
        *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
    } while (0)

#if N_RV32
#define FIT_SIGNED(value, bits) \
    ((((value) & ~((1U << ((bits) - 1)) - 1)) == 0) || \
    (((value) & ~((1U << ((bits) - 1)) - 1)) == ~((1U << ((bits) - 1)) - 1)))
#endif

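// For example, FIT_SIGNED(x, 12) tests whether x fits in a signed 12-bit
// immediate (e.g. a RISC-V I-type immediate): FIT_SIGNED(2047, 12) and
// FIT_SIGNED(-2048, 12) hold, while FIT_SIGNED(2048, 12) does not.
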
typedef enum {
    STACK_VALUE,
    STACK_REG,
    STACK_IMM,
} stack_info_kind_t;

// these enums must be distinct and the bottom 4 bits
// must correspond to the correct MP_NATIVE_TYPE_xxx value
typedef enum {
    VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
    VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
    VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
    VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
    VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
    VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,

    VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,

    VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
} vtype_kind_t;

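// For example, (VTYPE_PTR_NONE & 0x0f) == MP_NATIVE_TYPE_PTR, and the plain
// VTYPE_INT, VTYPE_PTR, etc values coincide exactly with their
// MP_NATIVE_TYPE_xxx codes, which is what allows a vtype to be passed
// directly as the type argument of MP_F_CONVERT_OBJ_TO_NATIVE below.
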
static qstr vtype_to_qstr(vtype_kind_t vtype) {
    switch (vtype) {
        case VTYPE_PYOBJ:
            return MP_QSTR_object;
        case VTYPE_BOOL:
            return MP_QSTR_bool;
        case VTYPE_INT:
            return MP_QSTR_int;
        case VTYPE_UINT:
            return MP_QSTR_uint;
        case VTYPE_PTR:
            return MP_QSTR_ptr;
        case VTYPE_PTR8:
            return MP_QSTR_ptr8;
        case VTYPE_PTR16:
            return MP_QSTR_ptr16;
        case VTYPE_PTR32:
            return MP_QSTR_ptr32;
        case VTYPE_PTR_NONE:
        default:
            return MP_QSTR_None;
    }
}

typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
        mp_int_t u_imm;
    } data;
} stack_info_t;

#define UNWIND_LABEL_UNUSED (0x7fff)
#define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)

typedef struct _exc_stack_entry_t {
    uint16_t label : 15;
    uint16_t is_finally : 1;
    uint16_t unwind_label : 15;
    uint16_t is_active : 1;
} exc_stack_entry_t;

struct _emit_t {
    mp_emit_common_t *emit_common;
    mp_obj_t *error_slot;
    uint *label_slot;
    uint exit_label;
    int pass;

    bool do_viper_types;

    mp_uint_t local_vtype_alloc;
    vtype_kind_t *local_vtype;

    mp_uint_t stack_info_alloc;
    stack_info_t *stack_info;
    vtype_kind_t saved_stack_vtype;

    size_t exc_stack_alloc;
    size_t exc_stack_size;
    exc_stack_entry_t *exc_stack;

    int prelude_offset;
    int prelude_ptr_index;
    int start_offset;
    int n_state;
    uint16_t code_state_start;
    uint16_t stack_start;
    int stack_size;
    uint16_t n_info;
    uint16_t n_cell;

    scope_t *scope;

    ASM_T *as;
};

#ifndef REG_ZERO
#define REG_ZERO REG_TEMP0
#define ASM_CLR_REG(state, rd) ASM_XOR_REG_REG(state, rd, rd)
#endif

static void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj);
static void emit_native_global_exc_entry(emit_t *emit);
static void emit_native_global_exc_exit(emit_t *emit);
static void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);

emit_t *EXPORT_FUN(new)(mp_emit_common_t * emit_common, mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels) {
    emit_t *emit = m_new0(emit_t, 1);
    emit->emit_common = emit_common;
    emit->error_slot = error_slot;
    emit->label_slot = label_slot;
    emit->stack_info_alloc = 8;
    emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    emit->exc_stack_alloc = 8;
    emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
    emit->as = m_new0(ASM_T, 1);
    mp_asm_base_init(&emit->as->base, max_num_labels);
    return emit;
}

void EXPORT_FUN(free)(emit_t * emit) {
    mp_asm_base_deinit(&emit->as->base, false);
    m_del_obj(ASM_T, emit->as);
    m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
    m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
    m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
    m_del_obj(emit_t, emit);
}

static void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);

static void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
    ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
}

static void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
        ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
    } else {
        ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
    }
}

static void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
        ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
    } else {
        ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
    }
}

static void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
        ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
        ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
    } else {
        ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
    }
}

static void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
    #if MICROPY_PERSISTENT_CODE_SAVE
    ASM_LOAD16_REG_REG_OFFSET(emit->as, arg_reg, REG_QSTR_TABLE, mp_emit_common_use_qstr(emit->emit_common, qst));
    #elif defined(ASM_MOV_REG_QSTR)
    ASM_MOV_REG_QSTR(emit->as, arg_reg, qst);
    #else
    ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
    #endif
}

// This function may clobber REG_TEMP0 (and `reg_dest` can be REG_TEMP0).
static void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
    #if MICROPY_PERSISTENT_CODE_SAVE
    emit_load_reg_with_object(emit, reg_dest, MP_OBJ_NEW_QSTR(qst));
    #else
    ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
    #endif
}

#define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
    do { \
        ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
        emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
    } while (false)

static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);

    emit->pass = pass;
    emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
    emit->stack_size = 0;
    emit->scope = scope;

    // allocate memory for keeping track of the types of locals
    if (emit->local_vtype_alloc < scope->num_locals) {
        emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
        emit->local_vtype_alloc = scope->num_locals;
    }

    // set default type for arguments
    mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
    if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
        num_args += 1;
    }
    if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
        num_args += 1;
    }
    for (mp_uint_t i = 0; i < num_args; i++) {
        emit->local_vtype[i] = VTYPE_PYOBJ;
    }

    // Set viper type for arguments
    if (emit->do_viper_types) {
        for (int i = 0; i < emit->scope->id_info_len; ++i) {
            id_info_t *id = &emit->scope->id_info[i];
            if (id->flags & ID_FLAG_IS_PARAM) {
                assert(id->local_num < emit->local_vtype_alloc);
                emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
            }
        }
    }

    // local variables begin unbound, and have unknown type
    for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
        emit->local_vtype[i] = emit->do_viper_types ? VTYPE_UNBOUND : VTYPE_PYOBJ;
    }

    // values on stack begin unbound
    for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
        emit->stack_info[i].kind = STACK_VALUE;
        emit->stack_info[i].vtype = VTYPE_UNBOUND;
    }

    mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);

    // generate code for entry to function

    // Work out start of code state (mp_code_state_native_t or reduced version for viper)
    emit->code_state_start = 0;
    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        emit->code_state_start = SIZEOF_NLR_BUF; // for nlr_buf_t
        emit->code_state_start += 1; // for return_value
        if (NEED_THROW_VAL(emit)) {
            emit->code_state_start += 2;
        } else if (NEED_EXC_HANDLER_UNWIND(emit)) {
            emit->code_state_start += 1;
        }
    }

    size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));

    if (emit->do_viper_types) {
        // Work out size of state (locals plus stack)
        // n_state counts all stack and locals, even those in registers
        emit->n_state = scope->num_locals + scope->stack_size;
        int num_locals_in_regs = 0;
        if (CAN_USE_REGS_FOR_LOCALS(emit)) {
            num_locals_in_regs = scope->num_locals;
            if (num_locals_in_regs > MAX_REGS_FOR_LOCAL_VARS) {
                num_locals_in_regs = MAX_REGS_FOR_LOCAL_VARS;
            }
            // Need a spot for REG_LOCAL_LAST (see below)
            if (scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1) {
                --num_locals_in_regs;
            }
        }

        // Work out where the locals and Python stack start within the C stack
        if (NEED_GLOBAL_EXC_HANDLER(emit)) {
            // Reserve 2 words for function object and old globals
            emit->stack_start = emit->code_state_start + 2;
        } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
            // Reserve 1 word for function object, to access const table
            emit->stack_start = emit->code_state_start + 1;
        } else {
            emit->stack_start = emit->code_state_start + 0;
        }

        // Entry to function
        ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);

        #if N_X86
        asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
        #endif

        // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
        ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
        #if MICROPY_PERSISTENT_CODE_SAVE
        ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
        #endif
        ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
        ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);

        // Store function object (passed as first arg) to stack if needed
        if (NEED_FUN_OBJ(emit)) {
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
        }

        // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_LAST
        #if N_X86
        asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
        asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
        asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_LAST);
        #else
        ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
        ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
        ASM_MOV_REG_REG(emit->as, REG_LOCAL_LAST, REG_PARENT_ARG_4);
        #endif

        // Check number of args matches this function, and call mp_arg_check_num_sig if not
        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
        ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
        ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
        mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
        ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
        ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
        mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);

        // Store arguments into locals (reg or stack), converting to native if needed
        for (int i = 0; i < emit->scope->num_pos_args; i++) {
            int r = REG_ARG_1;
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_LAST, i);
            if (emit->local_vtype[i] != VTYPE_PYOBJ) {
                emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
                r = REG_RET;
            }
            // REG_LOCAL_LAST points to the args array so be sure not to overwrite it if it's still needed
            if (i < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit) && (i != MAX_REGS_FOR_LOCAL_VARS - 1 || emit->scope->num_pos_args == MAX_REGS_FOR_LOCAL_VARS)) {
                ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
            } else {
                emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
            }
        }
        // Get local from the stack back into REG_LOCAL_LAST if this reg couldn't be written to above
        if (emit->scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1 && CAN_USE_REGS_FOR_LOCALS(emit)) {
            ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_LAST, LOCAL_IDX_LOCAL_VAR(emit, MAX_REGS_FOR_LOCAL_VARS - 1));
        }

        emit_native_global_exc_entry(emit);

    } else {
        // work out size of state (locals plus stack)
        emit->n_state = scope->num_locals + scope->stack_size;

        // Store in the first machine-word an index used to locate the function's prelude.
        // This is used at runtime by mp_obj_fun_native_get_prelude_ptr().
        mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_ptr_index);

        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
            mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
            ASM_ENTRY(emit->as, emit->code_state_start);

            // Reset the state size for the state pointed to by REG_GENERATOR_STATE
            emit->code_state_start = 0;
            emit->stack_start = SIZEOF_CODE_STATE;

            // Put address of code_state into REG_GENERATOR_STATE
            #if N_X86
            asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
            #else
            ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
            #endif

            // Put throw value into LOCAL_IDX_THROW_VAL slot, for yield/yield-from
            #if N_X86
            asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
            #endif
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_PARENT_ARG_2);

            // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
            #if MICROPY_PERSISTENT_CODE_SAVE
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
            #endif
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
        } else {
            // The locals and stack start after the code_state structure
            emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;

            // Allocate space on C-stack for code_state structure, which includes state
            ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);

            // Prepare incoming arguments for call to mp_setup_code_state

            #if N_X86
            asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
            asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
            asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
            asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
            #endif

            // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
            #if MICROPY_PERSISTENT_CODE_SAVE
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
            #endif
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);

            // Set code_state.fun_bc
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);

            // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
            emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_N_STATE, emit->n_state, REG_ARG_1);

            // Put address of code_state into first arg
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);

            // Copy next 3 args if needed
            #if REG_ARG_2 != REG_PARENT_ARG_2
            ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
            #endif
            #if REG_ARG_3 != REG_PARENT_ARG_3
            ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
            #endif
            #if REG_ARG_4 != REG_PARENT_ARG_4
            ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
            #endif

            // Call mp_setup_code_state to prepare code_state structure
            #if N_THUMB
            asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
            #elif N_ARM
            asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
            #else
            ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
            #endif
        }

        emit_native_global_exc_entry(emit);

        // cache some locals in registers, but only if no exception handlers
        if (CAN_USE_REGS_FOR_LOCALS(emit)) {
            for (int i = 0; i < MAX_REGS_FOR_LOCAL_VARS && i < scope->num_locals; ++i) {
                ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
            }
        }

        // set the type of closed over variables
        for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
            id_info_t *id = &scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
            }
        }
    }
}

static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
    mp_asm_base_data(&emit->as->base, 1, val);
}

static inline void emit_native_write_code_info_qstr(emit_t *emit, qstr qst) {
    mp_encode_uint(&emit->as->base, mp_asm_base_get_cur_to_write_bytes, mp_emit_common_use_qstr(emit->emit_common, qst));
}

static bool emit_native_end_pass(emit_t *emit) {
    emit_native_global_exc_exit(emit);

    if (!emit->do_viper_types) {
        emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
        emit->prelude_ptr_index = emit->emit_common->ct_cur_child;

        size_t n_state = emit->n_state;
        size_t n_exc_stack = 0; // exc-stack not needed for native code
        MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);

        size_t n_info = emit->n_info;
        size_t n_cell = emit->n_cell;
        MP_BC_PRELUDE_SIZE_ENCODE(n_info, n_cell, emit_native_write_code_info_byte, emit);

        // bytecode prelude: source info (function and argument qstrs)
        size_t info_start = mp_asm_base_get_code_pos(&emit->as->base);
        emit_native_write_code_info_qstr(emit, emit->scope->simple_name);
        for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
            qstr qst = MP_QSTR__star_;
            for (int j = 0; j < emit->scope->id_info_len; ++j) {
                id_info_t *id = &emit->scope->id_info[j];
                if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
                    qst = id->qst;
                    break;
                }
            }
            emit_native_write_code_info_qstr(emit, qst);
        }
        emit->n_info = mp_asm_base_get_code_pos(&emit->as->base) - info_start;

        // bytecode prelude: initialise closed over variables
        size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
        for (int i = 0; i < emit->scope->id_info_len; i++) {
            id_info_t *id = &emit->scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                assert(id->local_num <= 255);
                mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
            }
        }
        emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;

    }

    ASM_END_PASS(emit->as);

    // check stack is back to zero size
    assert(emit->stack_size == 0);
    assert(emit->exc_stack_size == 0);

    if (emit->pass == MP_PASS_EMIT) {
        void *f = mp_asm_base_get_code(&emit->as->base);
        mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);

        mp_raw_code_t **children = emit->emit_common->children;
        if (!emit->do_viper_types) {
            #if MICROPY_EMIT_NATIVE_PRELUDE_SEPARATE_FROM_MACHINE_CODE
            // Executable code cannot be accessed byte-wise on this architecture, so copy
            // the prelude to a separate memory region that is byte-wise readable.
            void *buf = emit->as->base.code_base + emit->prelude_offset;
            size_t n = emit->as->base.code_offset - emit->prelude_offset;
            const uint8_t *prelude_ptr = memcpy(m_new(uint8_t, n), buf, n);
            #else
            // Point to the prelude directly, at the end of the machine code data.
            const uint8_t *prelude_ptr = (const uint8_t *)f + emit->prelude_offset;
            #endif

            // Store the pointer to the prelude using the child_table.
            assert(emit->prelude_ptr_index == emit->emit_common->ct_cur_child);
            if (emit->prelude_ptr_index == 0) {
                children = (void *)prelude_ptr;
            } else {
                children = m_renew(mp_raw_code_t *, children, emit->prelude_ptr_index, emit->prelude_ptr_index + 1);
                children[emit->prelude_ptr_index] = (void *)prelude_ptr;
            }
        }

        mp_emit_glue_assign_native(emit->scope->raw_code,
            emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
            f, f_len,
            children,
            #if MICROPY_PERSISTENT_CODE_SAVE
            emit->emit_common->ct_cur_child,
            emit->prelude_offset,
            #endif
            emit->scope->scope_flags, 0, 0);
    }

    return true;
}

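// For reference, a sketch (inferred from start_pass and end_pass above) of
// how a non-viper native function is laid out in the emitted blob:
//
//     [ prelude_ptr_index word | start_offset word (generators only) ]
//     [ machine code ... ]
//     [ prelude: signature | sizes | info (qstrs) | cell list ]   <- prelude_offset
//
// with the pointer to the prelude published through the child_table at
// prelude_ptr_index.
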
static void ensure_extra_stack(emit_t *emit, size_t delta) {
    if (emit->stack_size + delta > emit->stack_info_alloc) {
        size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
        emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
        emit->stack_info_alloc = new_alloc;
    }
}

static void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
    assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
    assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
    emit->stack_size += stack_size_delta;
    if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
    #if DEBUG_PRINT
    DEBUG_printf("  adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
    }
    DEBUG_printf("\n");
    #endif
}

static void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
    DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
    if (delta > 0) {
        ensure_extra_stack(emit, delta);
    }
    // If we are adjusting the stack in a positive direction (pushing) then we
    // need to fill in values for the stack kind and vtype of the newly-pushed
    // entries. These should be set to "value" (ie not reg or imm) because we
    // should only need to adjust the stack due to a jump to this part in the
    // code (and hence we have settled the stack before the jump).
    for (mp_int_t i = 0; i < delta; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size + i];
        si->kind = STACK_VALUE;
        // TODO we don't know the vtype to use here. At the moment this is a
        // hack to get the case of multi comparison working.
        if (delta == 1) {
            si->vtype = emit->saved_stack_vtype;
        } else {
            si->vtype = VTYPE_PYOBJ;
        }
    }
    adjust_stack(emit, delta);
}

static void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
    (void)emit;
    (void)source_line;
}

// this must be called at start of emit functions
static void emit_native_pre(emit_t *emit) {
    (void)emit;
}

// depth==0 is top, depth==1 is before top, etc
static stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
    return &emit->stack_info[emit->stack_size - 1 - depth];
}

// depth==0 is top, depth==1 is before top, etc
static vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
    if (emit->do_viper_types) {
        return peek_stack(emit, depth)->vtype;
    } else {
        // Type is always PYOBJ even if the intermediate stored value is not
        return VTYPE_PYOBJ;
    }
}

// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
static void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
            if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
                si->kind = STACK_VALUE;
                emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
            }
        }
    }
}

// Ensures all unsettled registers that hold Python values are copied to the
// concrete Python stack. All registers are then free to use.
static void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            DEBUG_printf("  reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
            si->kind = STACK_VALUE;
            emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
        }
    }
}

static vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
    if (!convert_to_pyobj && emit->do_viper_types) {
        ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
        return si->vtype;
    } else {
        if (si->vtype == VTYPE_PYOBJ) {
            ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
        } else if (si->vtype == VTYPE_BOOL) {
            emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
        } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
            ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
        } else if (si->vtype == VTYPE_PTR_NONE) {
            emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
        } else {
            mp_raise_NotImplementedError(MP_ERROR_TEXT("conversion to object"));
        }
        return VTYPE_PYOBJ;
    }
}

// Copies all unsettled registers and immediates that are Python values into the
// concrete Python stack. This ensures the concrete Python stack holds valid
// values for the current stack_size.
// This function may clobber REG_TEMP1.
static void need_stack_settled(emit_t *emit) {
    DEBUG_printf("  need_stack_settled; stack_size=%d\n", emit->stack_size);
    need_reg_all(emit);
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            DEBUG_printf("  imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
            si->kind = STACK_VALUE;
            // using REG_TEMP1 to avoid clobbering REG_TEMP0 (aka REG_RET)
            si->vtype = load_reg_stack_imm(emit, REG_TEMP1, si, false);
            emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP1);
        }
    }
}

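// Lifecycle sketch tying these spill helpers to the push helpers defined
// further below (illustrative only):
//
//     emit_post_push_imm(emit, VTYPE_INT, 3);          // stack: [imm 3]
//     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);  // stack: [imm 3, reg]
//     need_stack_settled(emit);  // spills both entries to the C stack, so
//                                // every live entry becomes STACK_VALUE
//
// need_reg_single/need_reg_all only spill STACK_REG entries, whereas
// need_stack_settled additionally materialises STACK_IMM entries via
// load_reg_stack_imm.
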
// pos=1 is TOS, pos=2 is next, etc
static void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
            emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
            break;

        case STACK_REG:
            if (si->data.u_reg != reg_dest) {
                ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
            }
            break;

        case STACK_IMM:
            *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
            break;
    }
}

// does an efficient X=pop(); discard(); push(X)
// needs a (non-temp) register in case the popped element was stored in the stack
static void emit_fold_stack_top(emit_t *emit, int reg_dest) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
    si[0] = si[1];
    if (si->kind == STACK_VALUE) {
        // if folded element was on the stack we need to put it in a register
        emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
        si->kind = STACK_REG;
        si->data.u_reg = reg_dest;
    }
    adjust_stack(emit, -1);
}

// If stacked value is in a register and the register is not r1 or r2, then
// *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
static void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
    stack_info_t *si = peek_stack(emit, 0);
    if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
        *vtype = si->vtype;
        *reg_dest = si->data.u_reg;
        need_reg_single(emit, *reg_dest, 1);
    } else {
        emit_access_stack(emit, 1, vtype, *reg_dest);
    }
    adjust_stack(emit, -1);
}

static void emit_pre_pop_discard(emit_t *emit) {
    adjust_stack(emit, -1);
}

static void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

static void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

static void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

static void emit_post(emit_t *emit) {
    (void)emit;
}

static void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
    si->vtype = new_vtype;
}

static void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
    ensure_extra_stack(emit, 1);
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
    si->data.u_reg = reg;
    adjust_stack(emit, 1);
}

static void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
    ensure_extra_stack(emit, 1);
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
    si->data.u_imm = imm;
    adjust_stack(emit, 1);
}

static void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

static void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

static void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}

static void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
    need_reg_all(emit);
    ASM_CALL_IND(emit->as, fun_kind);
}

static void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
    ASM_CALL_IND(emit->as, fun_kind);
}

static void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
    need_reg_all(emit);
    ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
    ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
    ASM_CALL_IND(emit->as, fun_kind);
}

static void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
    need_reg_all(emit);
    emit_native_mov_reg_qstr(emit, arg_reg, qst);
    ASM_CALL_IND(emit->as, fun_kind);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
static void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
    need_reg_all(emit);

    // First, store any immediate values to their respective place on the stack.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
            emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
        }

        // verify that this value is on the stack
        assert(si->kind == STACK_VALUE);
    }

    // Second, convert any non-VTYPE_PYOBJ to that type.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        if (si->vtype != VTYPE_PYOBJ) {
            mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
            emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
            emit_native_mov_state_reg(emit, local_num, REG_RET);
            si->vtype = VTYPE_PYOBJ;
            DEBUG_printf("  convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
        }
    }

    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
    adjust_stack(emit, -n_pop);
    emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
}

// vtype of all n_push objects is VTYPE_PYOBJ
static void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
    need_reg_all(emit);
    ensure_extra_stack(emit, n_push);
    for (mp_uint_t i = 0; i < n_push; i++) {
        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
    }
    emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
    adjust_stack(emit, n_push);
}

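// Usage sketch (hypothetical, consistent with the helpers above; the real
// emit functions appear later in this file, beyond this excerpt): to call a
// runtime helper that consumes n_args Python values and produces one, pop
// the items so they are contiguous in memory, pass their base address, then
// push the result:
//
//     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args);
//     emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE, n_args, REG_ARG_1);
//     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
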
static void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
    if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
        size_t new_alloc = emit->exc_stack_alloc + 4;
        emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
        emit->exc_stack_alloc = new_alloc;
    }

    exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
    e->label = label;
    e->is_finally = is_finally;
    e->unwind_label = UNWIND_LABEL_UNUSED;
    e->is_active = true;

    ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
}

static void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
    assert(emit->exc_stack_size > 0);

    // Get current exception handler and deactivate it
    exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
    e->is_active = false;

    // Find next innermost active exception handler, to restore as current handler
    for (--e; e >= emit->exc_stack && !e->is_active; --e) {
    }

    // Update the PC of the new exception handler
    if (e < emit->exc_stack) {
        // No active handler, clear handler PC to zero
        if (start_of_handler) {
            // Optimisation: PC is already cleared by global exc handler
            return;
        }
        ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
    } else {
        // Found new active handler, get its PC
        ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
    }
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
}

static exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
    assert(emit->exc_stack_size > 0);
    exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
    assert(e->is_active == false);
    return e;
}

// This function will clobber REG_TEMP0 (and `reg` can be REG_TEMP0).
static void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
    emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
    size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
    emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
    ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
}

static void emit_load_reg_with_child(emit_t *emit, int reg, mp_raw_code_t *rc) {
    size_t table_off = mp_emit_common_alloc_const_child(emit->emit_common, rc);
    emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
    ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
    ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
}

static void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
    DEBUG_printf("label_assign(" UINT_FMT ")\n", l);

    bool is_finally = false;
    if (emit->exc_stack_size > 0) {
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        is_finally = e->is_finally && e->label == l;
    }

    if (is_finally) {
        // Label is at start of finally handler: store TOS into exception slot
        vtype_kind_t vtype;
        emit_access_stack(emit, 1, &vtype, REG_TEMP0);
        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
    }

    emit_native_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
    mp_asm_base_label_assign(&emit->as->base, l);
    emit_post(emit);

    if (is_finally) {
        // Label is at start of finally handler: pop exception stack
        emit_native_leave_exc_stack(emit, false);
    }
}

static void emit_native_global_exc_entry(emit_t *emit) {
    // Note: 4 labels are reserved for this function, starting at *emit->label_slot

    emit->exit_label = *emit->label_slot;

    if (NEED_GLOBAL_EXC_HANDLER(emit)) {
        mp_uint_t nlr_label = *emit->label_slot + 1;
        mp_uint_t start_label = *emit->label_slot + 2;
        mp_uint_t global_except_label = *emit->label_slot + 3;

        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            // Set new globals
            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_MODULE_CONTEXT_GLOBALS);
            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);

            // Save old globals (or NULL if globals didn't change)
            emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
        }

        if (emit->scope->exc_stack_size == 0) {
            if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
                // Optimisation: if globals didn't change don't push the nlr context
                ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
            }

            // Wrap everything in an nlr context
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
            emit_call(emit, MP_F_NLR_PUSH);
            #if N_NLR_SETJMP
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
            emit_call(emit, MP_F_SETJMP);
            #endif
            ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
        } else {
            // Clear the unwind state
            ASM_CLR_REG(emit->as, REG_ZERO);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_ZERO);

            // clear nlr.ret_val, because it's passed to mp_native_raise regardless
            // of whether there was an exception or not
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_ZERO);

            // Put PC of start code block into REG_LOCAL_1
            ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);

            // Wrap everything in an nlr context
            emit_native_label_assign(emit, nlr_label);
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
            emit_call(emit, MP_F_NLR_PUSH);
            #if N_NLR_SETJMP
            ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
            emit_call(emit, MP_F_SETJMP);
            #endif
            ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);

            // Clear PC of current code block, and jump there to resume execution
            ASM_CLR_REG(emit->as, REG_ZERO);
            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_ZERO);
            ASM_JUMP_REG(emit->as, REG_LOCAL_1);

            // Global exception handler: check for valid exception handler
            emit_native_label_assign(emit, global_except_label);
            ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
            ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
        }

        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
            // Restore old globals
            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
        }

        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
            // Store return value in state[0]
            ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
1248 : 1049 : ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
1249 : :
1250 : : // Load return kind
1251 : 1049 : ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
1252 : :
1253 : 1049 : ASM_EXIT(emit->as);
1254 : : } else {
1255 : : // Re-raise exception out to caller
1256 : 9274 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1257 : 9274 : emit_call(emit, MP_F_NATIVE_RAISE);
1258 : : }
1259 : :
1260 : : // Label for start of function
1261 : 10323 : emit_native_label_assign(emit, start_label);
1262 : :
1263 [ + + ]: 10323 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1264 : 1049 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
1265 : 1049 : ASM_JUMP_REG(emit->as, REG_TEMP0);
1266 : 1049 : emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
1267 : :
1268 : : // This is the first entry of the generator
1269 : :
1270 : : // Check LOCAL_IDX_THROW_VAL for any injected value
1271 : 1049 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_THROW_VAL(emit));
1272 : 1049 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
1273 : 1049 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
1274 : 1049 : emit_call(emit, MP_F_NATIVE_RAISE);
1275 : : }
1276 : : }
1277 : 14795 : }
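// Editorial sketch (hand-written approximation, not the generated code): the
// entry sequence emitted above for the simplest case -- a non-generator scope
// with exc_stack_size == 0. setjmp stands in for nlr_push, and the lowercase
// helpers are hypothetical stand-ins for the MP_F_* runtime stubs.
#include <setjmp.h>
#include <stddef.h>
typedef void *obj_t;                    // stand-in for mp_obj_t
extern obj_t swap_globals(obj_t g);     // stand-in for mp_native_swap_globals
extern void native_raise(obj_t exc);    // stand-in for mp_native_raise
static obj_t sketch_global_exc_entry(obj_t fun_globals) {
    obj_t old_globals = swap_globals(fun_globals); // NULL if globals didn't change
    jmp_buf nlr;                        // the nlr_buf_t at C-stack slot 0
    if (old_globals != NULL) {          // optimisation: skip the nlr push otherwise
        if (setjmp(nlr) != 0) {         // nlr_push; non-zero means an exception arrived
            swap_globals(old_globals);  // restore the caller's globals
            native_raise(NULL);         // re-raise the exception out to the caller
        }
    }
    // start_label: the compiled function body is emitted from here on
    return NULL;
}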
1278 : :
1279 : 14779 : static void emit_native_global_exc_exit(emit_t *emit) {
1280 : : // Label for end of function
1281 : 14779 : emit_native_label_assign(emit, emit->exit_label);
1282 : :
1283 [ + + + + ]: 14779 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1284 : : // Get old globals
1285 [ + + ]: 10311 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1286 : 9270 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1287 : :
1288 [ + + ]: 9270 : if (emit->scope->exc_stack_size == 0) {
1289 : : // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
1290 : 6891 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
1291 : : }
1292 : :
1293 : : // Restore old globals
1294 : 9270 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1295 : : }
1296 : :
1297 : : // Pop the nlr context
1298 : 10311 : emit_call(emit, MP_F_NLR_POP);
1299 : :
1300 [ + + ]: 10311 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1301 [ + + ]: 9270 : if (emit->scope->exc_stack_size == 0) {
1302 : : // Destination label for above optimisation
1303 : 6891 : emit_native_label_assign(emit, emit->exit_label + 1);
1304 : : }
1305 : : }
1306 : :
1307 : : // Load return value
1308 : 10311 : ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
1309 : : }
1310 : :
1311 : 14779 : ASM_EXIT(emit->as);
1312 : 14779 : }
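// Editorial sketch of the matching exit sequence emitted above (same
// hypothetical stand-ins as the entry sketch): the exit_label+1 jump skips
// both the globals restore and the nlr pop when globals were never swapped.
extern void *swap_globals_(void *g);    // stand-in for mp_native_swap_globals
extern void nlr_pop_(void);             // stand-in for nlr_pop
static void *sketch_global_exc_exit(void *old_globals, void *ret_val) {
    if (old_globals != NULL) {
        swap_globals_(old_globals);     // restore the caller's globals
        nlr_pop_();                     // pop the nlr context pushed on entry
    }
    return ret_val;                     // REG_PARENT_RET <- LOCAL_IDX_RET_VAL
}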
1313 : :
1314 : 2604 : static void emit_native_import_name(emit_t *emit, qstr qst) {
1315 : 2604 : DEBUG_printf("import_name %s\n", qstr_str(qst));
1316 : :
1317 : : // get arguments from stack: arg2 = fromlist, arg3 = level
1318 : : // If viper types are in use these arguments must be converted to proper objects;
1319 : : // to accomplish this, viper types are turned off for the emit_pre_pop_reg_reg call.
1320 : 2604 : bool orig_do_viper_types = emit->do_viper_types;
1321 : 2604 : emit->do_viper_types = false;
1322 : 2604 : vtype_kind_t vtype_fromlist;
1323 : 2604 : vtype_kind_t vtype_level;
1324 : 2604 : emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
1325 [ - + ]: 2604 : assert(vtype_fromlist == VTYPE_PYOBJ);
1326 [ - + ]: 2604 : assert(vtype_level == VTYPE_PYOBJ);
1327 : 2604 : emit->do_viper_types = orig_do_viper_types;
1328 : :
1329 : 2604 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
1330 : 2604 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1331 : 2604 : }
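// Editorial sketch: the run-time shape of the call emitted above (signature
// of mp_import_name per py/runtime.h); fromlist and level come off the value
// stack, already converted to objects:
static mp_obj_t sketch_import_name(qstr qst, mp_obj_t fromlist, mp_obj_t level) {
    return mp_import_name(qst, fromlist, level); // the MP_F_IMPORT_NAME stub
}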
1332 : :
1333 : 1062 : static void emit_native_import_from(emit_t *emit, qstr qst) {
1334 : 1062 : DEBUG_printf("import_from %s\n", qstr_str(qst));
1335 : 1062 : emit_native_pre(emit);
1336 : 1062 : vtype_kind_t vtype_module;
1337 : 1062 : emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
1338 [ - + ]: 1062 : assert(vtype_module == VTYPE_PYOBJ);
1339 : 1062 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
1340 : 1062 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1341 : 1062 : }
1342 : :
1343 : 123 : static void emit_native_import_star(emit_t *emit) {
1344 : 123 : DEBUG_printf("import_star\n");
1345 : 123 : vtype_kind_t vtype_module;
1346 : 123 : emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
1347 [ - + ]: 123 : assert(vtype_module == VTYPE_PYOBJ);
1348 : 123 : emit_call(emit, MP_F_IMPORT_ALL);
1349 : 123 : emit_post(emit);
1350 : 123 : }
1351 : :
1352 : 3789 : static void emit_native_import(emit_t *emit, qstr qst, int kind) {
1353 [ + + ]: 3789 : if (kind == MP_EMIT_IMPORT_NAME) {
1354 : 2604 : emit_native_import_name(emit, qst);
1355 [ + + ]: 1185 : } else if (kind == MP_EMIT_IMPORT_FROM) {
1356 : 1062 : emit_native_import_from(emit, qst);
1357 : : } else {
1358 : 123 : emit_native_import_star(emit);
1359 : : }
1360 : 3789 : }
1361 : :
1362 : 27014 : static void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
1363 : 27014 : DEBUG_printf("load_const_tok(tok=%u)\n", tok);
1364 [ + + ]: 27014 : if (tok == MP_TOKEN_ELLIPSIS) {
1365 : 12 : emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
1366 : : } else {
1367 : 27002 : emit_native_pre(emit);
1368 [ + + ]: 27002 : if (tok == MP_TOKEN_KW_NONE) {
1369 : 24890 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
1370 : : } else {
1371 : 2112 : emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
1372 : : }
1373 : : }
1374 : 27014 : }
1375 : :
1376 : 37686 : static void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
1377 : 37686 : DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
1378 : 37686 : emit_native_pre(emit);
1379 : 37686 : emit_post_push_imm(emit, VTYPE_INT, arg);
1380 : 37686 : }
1381 : :
1382 : 26142 : static void emit_native_load_const_str(emit_t *emit, qstr qst) {
1383 : 26142 : emit_native_pre(emit);
1384 : : // TODO: Eventually we want to be able to work with raw pointers in viper to
1385 : : // do native array access. For now we just load them as any other object.
1386 : : /*
1387 : : if (emit->do_viper_types) {
1388 : : // load a pointer to the asciiz string?
1389 : : emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
1390 : : } else
1391 : : */
1392 : : {
1393 : 26142 : need_reg_single(emit, REG_TEMP0, 0);
1394 : 26142 : emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
1395 : 26142 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1396 : : }
1397 : 26142 : }
1398 : :
1399 : 12108 : static void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
1400 : 12108 : emit_native_pre(emit);
1401 : 12108 : need_reg_single(emit, REG_TEMP0, 0);
1402 : 12108 : emit_load_reg_with_object(emit, REG_TEMP0, obj);
1403 : 12108 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1404 : 12108 : }
1405 : :
1406 : 1323 : static void emit_native_load_null(emit_t *emit) {
1407 : 1323 : emit_native_pre(emit);
1408 : 1323 : emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
1409 : 1323 : }
1410 : :
1411 : 34737 : static void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1412 : 34737 : DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1413 : 34737 : vtype_kind_t vtype = emit->local_vtype[local_num];
1414 [ + + ]: 34737 : if (vtype == VTYPE_UNBOUND) {
1415 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
1416 : : }
1417 : 34737 : emit_native_pre(emit);
1418 [ + + + + + + ]: 34737 : if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
1419 : 18897 : emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
1420 : : } else {
1421 : 15840 : need_reg_single(emit, REG_TEMP0, 0);
1422 : 15840 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
1423 : 15840 : emit_post_push_reg(emit, vtype, REG_TEMP0);
1424 : : }
1425 : 34737 : }
1426 : :
1427 : 1023 : static void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1428 : 1023 : DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1429 : 1023 : need_reg_single(emit, REG_RET, 0);
1430 : 1023 : emit_native_load_fast(emit, qst, local_num);
1431 : 1023 : vtype_kind_t vtype;
1432 : 1023 : int reg_base = REG_RET;
1433 : 1023 : emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1434 : 1023 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1435 : : // closed over vars are always Python objects
1436 : 1023 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1437 : 1023 : }
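// Editorial sketch: the offset-1 word load above is the closure-cell read,
// equivalent to mp_obj_cell_get() given the cell layout in py/obj.h (one
// mp_obj_base_t word followed by the stored object):
static mp_obj_t sketch_load_deref(mp_obj_t cell) {
    return ((mp_obj_cell_t *)MP_OBJ_TO_PTR(cell))->obj;
}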
1438 : :
1439 : 34302 : static void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1440 [ + + ]: 34302 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1441 : 33279 : emit_native_load_fast(emit, qst, local_num);
1442 : : } else {
1443 : 1023 : emit_native_load_deref(emit, qst, local_num);
1444 : : }
1445 : 34302 : }
1446 : :
1447 : 87099 : static void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
1448 : 87099 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
1449 : 87099 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
1450 : 87099 : emit_native_pre(emit);
1451 [ + + ]: 87099 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1452 : : DEBUG_printf("load_name(%s)\n", qstr_str(qst));
1453 : : } else {
1454 : 20403 : DEBUG_printf("load_global(%s)\n", qstr_str(qst));
1455 [ + + ]: 20403 : if (emit->do_viper_types) {
1456 : : // check for builtin casting operators
1457 : 594 : int native_type = mp_native_type_from_qstr(qst);
1458 [ + + ]: 594 : if (native_type >= MP_NATIVE_TYPE_BOOL) {
1459 : 100 : emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
1460 : 100 : return;
1461 : : }
1462 : : }
1463 : : }
1464 : 86999 : emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
1465 : 86999 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1466 : : }
1467 : :
1468 : 11133 : static void emit_native_load_attr(emit_t *emit, qstr qst) {
1469 : : // depends on the type of the base:
1470 : : // - integer, function, pointer to integers: error
1471 : : // - pointer to structure: get the member, quite easy
1472 : : // - Python object: call mp_load_attr, and the result may need a type conversion
1473 : 11133 : vtype_kind_t vtype_base;
1474 : 11133 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1475 [ - + ]: 11133 : assert(vtype_base == VTYPE_PYOBJ);
1476 : 11133 : emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1477 : 11133 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1478 : 11133 : }
1479 : :
1480 : 17136 : static void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
1481 : 17136 : DEBUG_printf("load_method(%s, %d)\n", qstr_str(qst), is_super);
1482 [ + + ]: 17136 : if (is_super) {
1483 : 63 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
1484 : 63 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
1485 : 63 : emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
1486 : : } else {
1487 : 17073 : vtype_kind_t vtype_base;
1488 : 17073 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1489 [ - + ]: 17073 : assert(vtype_base == VTYPE_PYOBJ);
1490 : 17073 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1491 : 17073 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
1492 : : }
1493 : 17136 : }
1494 : :
1495 : 1662 : static void emit_native_load_build_class(emit_t *emit) {
1496 : 1662 : emit_native_pre(emit);
1497 : 1662 : emit_call(emit, MP_F_LOAD_BUILD_CLASS);
1498 : 1662 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1499 : 1662 : }
1500 : :
1501 : 4253 : static void emit_native_load_subscr(emit_t *emit) {
1502 : 4253 : DEBUG_printf("load_subscr\n");
1503 : : // need to compile: base[index]
1504 : :
1505 : : // pop: index, base
1506 : : // optimise case where index is an immediate
1507 : 4253 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1508 : :
1509 [ + + ]: 4253 : if (vtype_base == VTYPE_PYOBJ) {
1510 : : // standard Python subscr
1511 : : // TODO factor this implicit cast code with other uses of it
1512 : 4107 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1513 [ + + ]: 4107 : if (vtype_index == VTYPE_PYOBJ) {
1514 : 4101 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1515 : : } else {
1516 : 6 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1517 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
1518 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1519 : : }
1520 : 4107 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1521 : 4107 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
1522 : 4107 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1523 : : } else {
1524 : : // viper load
1525 : : // TODO The different machine architectures have very different
1526 : : // capabilities and requirements for loads, so probably best to
1527 : : // write a completely separate load-optimiser for each one.
1528 : 146 : stack_info_t *top = peek_stack(emit, 0);
1529 [ + + ]: 146 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1530 : : // index is an immediate
1531 : 70 : mp_int_t index_value = top->data.u_imm;
1532 : 70 : emit_pre_pop_discard(emit); // discard index
1533 : 70 : int reg_base = REG_ARG_1;
1534 : 70 : int reg_index = REG_ARG_2;
1535 : 70 : emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_index);
1536 : 70 : need_reg_single(emit, REG_RET, 0);
1537 [ + + + + ]: 70 : switch (vtype_base) {
1538 : 42 : case VTYPE_PTR8: {
1539 : : // pointer to 8-bit memory
1540 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1541 [ + + ]: 42 : if (index_value != 0) {
1542 : : // index is non-zero
1543 : : #if N_THUMB
1544 : : if (index_value > 0 && index_value < 32) {
1545 : : asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1546 : : break;
1547 : : }
1548 : : #elif N_RV32
1549 : : if (FIT_SIGNED(index_value, 12)) {
1550 : : asm_rv32_opcode_lbu(emit->as, REG_RET, reg_base, index_value);
1551 : : break;
1552 : : }
1553 : : #elif N_XTENSA || N_XTENSAWIN
1554 : : if (index_value > 0 && index_value < 256) {
1555 : : asm_xtensa_op_l8ui(emit->as, REG_RET, reg_base, index_value);
1556 : : break;
1557 : : }
1558 : : #endif
1559 : 30 : need_reg_single(emit, reg_index, 0);
1560 : 30 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1561 : 30 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1562 : 30 : reg_base = reg_index;
1563 : : }
1564 : 42 : ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
1565 : 42 : break;
1566 : : }
1567 : 12 : case VTYPE_PTR16: {
1568 : : // pointer to 16-bit memory
1569 [ + + ]: 12 : if (index_value != 0) {
1570 : : // index is a non-zero immediate
1571 : : #if N_THUMB
1572 : : if (index_value > 0 && index_value < 32) {
1573 : : asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1574 : : break;
1575 : : }
1576 : : #elif N_RV32
1577 : : if (FIT_SIGNED(index_value, 11)) {
1578 : : asm_rv32_opcode_lhu(emit->as, REG_RET, reg_base, index_value << 1);
1579 : : break;
1580 : : }
1581 : : #elif N_XTENSA || N_XTENSAWIN
1582 : : if (index_value > 0 && index_value < 256) {
1583 : : asm_xtensa_op_l16ui(emit->as, REG_RET, reg_base, index_value);
1584 : : break;
1585 : : }
1586 : : #endif
1587 : 6 : need_reg_single(emit, reg_index, 0);
1588 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1589 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1590 : 6 : reg_base = reg_index;
1591 : : }
1592 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
1593 : 12 : break;
1594 : : }
1595 : 12 : case VTYPE_PTR32: {
1596 : : // pointer to 32-bit memory
1597 [ + + ]: 12 : if (index_value != 0) {
1598 : : // index is a non-zero immediate
1599 : : #if N_THUMB
1600 : : if (index_value > 0 && index_value < 32) {
1601 : : asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1602 : : break;
1603 : : }
1604 : : #elif N_RV32
1605 : : if (FIT_SIGNED(index_value, 10)) {
1606 : : asm_rv32_opcode_lw(emit->as, REG_RET, reg_base, index_value << 2);
1607 : : break;
1608 : : }
1609 : : #elif N_XTENSA || N_XTENSAWIN
1610 : : if (index_value > 0 && index_value < 256) {
1611 : : asm_xtensa_l32i_optimised(emit->as, REG_RET, reg_base, index_value);
1612 : : break;
1613 : : }
1614 : : #endif
1615 : 6 : need_reg_single(emit, reg_index, 0);
1616 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1617 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1618 : 6 : reg_base = reg_index;
1619 : : }
1620 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
1621 : 12 : break;
1622 : : }
1623 : 4 : default:
1624 : 70 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1625 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1626 : : }
1627 : : } else {
1628 : : // index is not an immediate
1629 : 76 : vtype_kind_t vtype_index;
1630 : 76 : int reg_index = REG_ARG_2;
1631 : 76 : emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, REG_ARG_1);
1632 : 76 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1633 : 76 : need_reg_single(emit, REG_RET, 0);
1634 [ + + ]: 76 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1635 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1636 : : MP_ERROR_TEXT("can't load with '%q' index"), vtype_to_qstr(vtype_index));
1637 : : }
1638 [ + + + + ]: 76 : switch (vtype_base) {
1639 : 48 : case VTYPE_PTR8: {
1640 : : // pointer to 8-bit memory
1641 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1642 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1643 : 48 : ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
1644 : 48 : break;
1645 : : }
1646 : 12 : case VTYPE_PTR16: {
1647 : : // pointer to 16-bit memory
1648 : : #if N_XTENSA || N_XTENSAWIN
1649 : : asm_xtensa_op_addx2(emit->as, REG_ARG_1, reg_index, REG_ARG_1);
1650 : : asm_xtensa_op_l16ui(emit->as, REG_RET, REG_ARG_1, 0);
1651 : : break;
1652 : : #endif
1653 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1654 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 2*index)
1655 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
1656 : 12 : break;
1657 : : }
1658 : 12 : case VTYPE_PTR32: {
1659 : : // pointer to word-size memory
1660 : : #if N_RV32
1661 : : asm_rv32_opcode_slli(emit->as, REG_TEMP2, reg_index, 2);
1662 : : asm_rv32_opcode_cadd(emit->as, REG_ARG_1, REG_TEMP2);
1663 : : asm_rv32_opcode_lw(emit->as, REG_RET, REG_ARG_1, 0);
1664 : : break;
1665 : : #elif N_XTENSA || N_XTENSAWIN
1666 : : asm_xtensa_op_addx4(emit->as, REG_ARG_1, reg_index, REG_ARG_1);
1667 : : asm_xtensa_op_l32i_n(emit->as, REG_RET, REG_ARG_1, 0);
1668 : : break;
1669 : : #endif
1670 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1671 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 2*index)
1672 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 3*index)
1673 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 4*index)
1674 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
1675 : 12 : break;
1676 : : }
1677 : 4 : default:
1678 : 76 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1679 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1680 : : }
1681 : : }
1682 : 146 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1683 : : }
1684 : 4253 : }
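// Editorial sketch of what the viper load cases above compute once base and
// index are in registers: a plain scaled read at each pointer width (the
// per-architecture paths merely pick different instructions for the same
// address arithmetic).
#include <stdint.h>
static uint32_t sketch_viper_load(int ptr_width, uintptr_t base, intptr_t index) {
    switch (ptr_width) {
        case 8:  return *(const uint8_t *)(base + index);       // VTYPE_PTR8
        case 16: return *(const uint16_t *)(base + 2 * index);  // VTYPE_PTR16
        default: return *(const uint32_t *)(base + 4 * index);  // VTYPE_PTR32
    }
}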
1685 : :
1686 : 8293 : static void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1687 : 8293 : vtype_kind_t vtype;
1688 [ + + + + + + ]: 8293 : if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
1689 : 1690 : emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
1690 : : } else {
1691 : 6603 : emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1692 : 6603 : emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
1693 : : }
1694 : 8293 : emit_post(emit);
1695 : :
1696 : : // check types
1697 [ + + ]: 8293 : if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1698 : : // first time this local is assigned, so give it a type of the object stored in it
1699 : 312 : emit->local_vtype[local_num] = vtype;
1700 [ + + ]: 7981 : } else if (emit->local_vtype[local_num] != vtype) {
1701 : : // type of local is not the same as object stored in it
1702 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1703 : : MP_ERROR_TEXT("local '%q' has type '%q' but source is '%q'"),
1704 : : qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
1705 : : }
1706 : 8293 : }
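// Editorial sketch of the typing rule implemented above, with plain ints
// standing in for vtype_kind_t: a viper local takes the type of its first
// assignment, and every later assignment must agree with it.
enum { SKETCH_UNBOUND = 0 }; // mirrors VTYPE_UNBOUND
static int sketch_store_local_type(int *slot_type, int src_type) {
    if (*slot_type == SKETCH_UNBOUND) {
        *slot_type = src_type;       // first store fixes the local's type
        return 1;                    // ok
    }
    return *slot_type == src_type;   // otherwise it's a compile-time type error
}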
1707 : :
1708 : 435 : static void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1709 : 435 : DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1710 : 435 : need_reg_single(emit, REG_TEMP0, 0);
1711 : 435 : need_reg_single(emit, REG_TEMP1, 0);
1712 : 435 : emit_native_load_fast(emit, qst, local_num);
1713 : 435 : vtype_kind_t vtype;
1714 : 435 : int reg_base = REG_TEMP0;
1715 : 435 : emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1716 : 435 : int reg_src = REG_TEMP1;
1717 : 435 : emit_pre_pop_reg_flexible(emit, &vtype, &reg_src, reg_base, reg_base);
1718 : 435 : ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
1719 : 435 : emit_post(emit);
1720 : 435 : }
1721 : :
1722 : 8359 : static void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1723 [ + + ]: 8359 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1724 : 7924 : emit_native_store_fast(emit, qst, local_num);
1725 : : } else {
1726 : 435 : emit_native_store_deref(emit, qst, local_num);
1727 : : }
1728 : 8359 : }
1729 : :
1730 : 25146 : static void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
1731 : 25146 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
1732 : 25146 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
1733 [ + + ]: 25146 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1734 : : // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
1735 : 24180 : vtype_kind_t vtype;
1736 : 24180 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1737 [ - + ]: 24180 : assert(vtype == VTYPE_PYOBJ);
1738 : : } else {
1739 : 966 : vtype_kind_t vtype = peek_vtype(emit, 0);
1740 [ + + ]: 966 : if (vtype == VTYPE_PYOBJ) {
1741 : 960 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1742 : : } else {
1743 : 6 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1744 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
1745 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1746 : : }
1747 : : }
1748 : 25146 : emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
1749 : 25146 : emit_post(emit);
1750 : 25146 : }
1751 : :
1752 : 2835 : static void emit_native_store_attr(emit_t *emit, qstr qst) {
1753 : 2835 : vtype_kind_t vtype_base;
1754 : 2835 : vtype_kind_t vtype_val = peek_vtype(emit, 1);
1755 [ + + ]: 2835 : if (vtype_val == VTYPE_PYOBJ) {
1756 : 2823 : emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
1757 : : } else {
1758 : 12 : emit_access_stack(emit, 2, &vtype_val, REG_ARG_1); // arg1 = value
1759 : 12 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_val, REG_ARG_2); // arg2 = type
1760 : 12 : ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_RET); // arg3 = value (converted)
1761 : 12 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1762 : 12 : adjust_stack(emit, -1); // pop value
1763 : : }
1764 [ - + ]: 2835 : assert(vtype_base == VTYPE_PYOBJ);
1765 : 2835 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1766 : 2835 : emit_post(emit);
1767 : 2835 : }
1768 : :
1769 : 1532 : static void emit_native_store_subscr(emit_t *emit) {
1770 : 1532 : DEBUG_printf("store_subscr\n");
1771 : : // need to compile: base[index] = value
1772 : :
1773 : : // pop: index, base, value
1774 : : // optimise case where index is an immediate
1775 : 1532 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1776 : :
1777 [ + + ]: 1532 : if (vtype_base == VTYPE_PYOBJ) {
1778 : : // standard Python subscr
1779 : 1404 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1780 : 1404 : vtype_kind_t vtype_value = peek_vtype(emit, 2);
1781 [ + + ]: 1404 : if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
1782 : : // need to implicitly convert non-objects to objects
1783 : : // TODO do this properly
1784 : 6 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1785 : 6 : adjust_stack(emit, 3);
1786 : : }
1787 : 1404 : emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
1788 : 1404 : emit_call(emit, MP_F_OBJ_SUBSCR);
1789 : : } else {
1790 : : // viper store
1791 : : // TODO The different machine architectures have very different
1792 : : // capabilities and requirements for stores, so probably best to
1793 : : // write a completely separate store-optimiser for each one.
1794 : 128 : stack_info_t *top = peek_stack(emit, 0);
1795 [ + + ]: 128 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1796 : : // index is an immediate
1797 : 48 : mp_int_t index_value = top->data.u_imm;
1798 : 48 : emit_pre_pop_discard(emit); // discard index
1799 : 48 : vtype_kind_t vtype_value;
1800 : 48 : int reg_base = REG_ARG_1;
1801 : 48 : int reg_index = REG_ARG_2;
1802 : 48 : int reg_value = REG_ARG_3;
1803 : 48 : emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_value);
1804 : : #if N_X64 || N_X86
1805 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1806 : 48 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1807 : : #else
1808 : : emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, reg_base, reg_index);
1809 : : #endif
1810 [ + + ]: 48 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1811 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1812 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1813 : : }
1814 [ + + + + ]: 48 : switch (vtype_base) {
1815 : 12 : case VTYPE_PTR8: {
1816 : : // pointer to 8-bit memory
1817 : : // TODO optimise to use thumb strb r1, [r2, r3]
1818 [ + + ]: 12 : if (index_value != 0) {
1819 : : // index is non-zero
1820 : : #if N_THUMB
1821 : : if (index_value > 0 && index_value < 32) {
1822 : : asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1823 : : break;
1824 : : }
1825 : : #elif N_RV32
1826 : : if (FIT_SIGNED(index_value, 12)) {
1827 : : asm_rv32_opcode_sb(emit->as, reg_value, reg_base, index_value);
1828 : : break;
1829 : : }
1830 : : #elif N_XTENSA || N_XTENSAWIN
1831 : : if (index_value > 0 && index_value < 256) {
1832 : : asm_xtensa_op_s8i(emit->as, reg_value, reg_base, index_value);
1833 : : break;
1834 : : }
1835 : : #endif
1836 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1837 : : #if N_ARM
1838 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1839 : : return;
1840 : : #endif
1841 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1842 : 6 : reg_base = reg_index;
1843 : : }
1844 : 12 : ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
1845 : 12 : break;
1846 : : }
1847 : 12 : case VTYPE_PTR16: {
1848 : : // pointer to 16-bit memory
1849 [ + + ]: 12 : if (index_value != 0) {
1850 : : // index is a non-zero immediate
1851 : : #if N_THUMB
1852 : : if (index_value > 0 && index_value < 32) {
1853 : : asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1854 : : break;
1855 : : }
1856 : : #elif N_RV32
1857 : : if (FIT_SIGNED(index_value, 11)) {
1858 : : asm_rv32_opcode_sh(emit->as, reg_value, reg_base, index_value << 1);
1859 : : break;
1860 : : }
1861 : : #elif N_XTENSA || N_XTENSAWIN
1862 : : if (index_value > 0 && index_value < 256) {
1863 : : asm_xtensa_op_s16i(emit->as, reg_value, reg_base, index_value);
1864 : : break;
1865 : : }
1866 : : #endif
1867 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1868 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1869 : 6 : reg_base = reg_index;
1870 : : }
1871 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
1872 : 12 : break;
1873 : : }
1874 : 16 : case VTYPE_PTR32: {
1875 : : // pointer to 32-bit memory
1876 [ + + ]: 16 : if (index_value != 0) {
1877 : : // index is a non-zero immediate
1878 : : #if N_THUMB
1879 : : if (index_value > 0 && index_value < 32) {
1880 : : asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1881 : : break;
1882 : : }
1883 : : #elif N_RV32
1884 : : if (FIT_SIGNED(index_value, 10)) {
1885 : : asm_rv32_opcode_sw(emit->as, reg_value, reg_base, index_value << 2);
1886 : : break;
1887 : : }
1888 : : #elif N_XTENSA || N_XTENSAWIN
1889 : : if (index_value > 0 && index_value < 256) {
1890 : : asm_xtensa_s32i_optimised(emit->as, reg_value, reg_base, index_value);
1891 : : break;
1892 : : }
1893 : : #elif N_ARM
1894 : : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1895 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1896 : : return;
1897 : : #endif
1898 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1899 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1900 : 6 : reg_base = reg_index;
1901 : : }
1902 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
1903 : 16 : break;
1904 : : }
1905 : 8 : default:
1906 : 48 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1907 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1908 : : }
1909 : : } else {
1910 : : // index is not an immediate
1911 : 80 : vtype_kind_t vtype_index, vtype_value;
1912 : 80 : int reg_index = REG_ARG_2;
1913 : 80 : int reg_value = REG_ARG_3;
1914 : 80 : emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, reg_value);
1915 : 80 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1916 [ + + ]: 80 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1917 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1918 : : MP_ERROR_TEXT("can't store with '%q' index"), vtype_to_qstr(vtype_index));
1919 : : }
1920 : : #if N_X64 || N_X86
1921 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1922 : 80 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1923 : : #else
1924 : : emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, REG_ARG_1, reg_index);
1925 : : #endif
1926 [ + + ]: 80 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1927 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1928 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1929 : : }
1930 [ + + + + ]: 80 : switch (vtype_base) {
1931 : 48 : case VTYPE_PTR8: {
1932 : : // pointer to 8-bit memory
1933 : : // TODO optimise to use thumb strb r1, [r2, r3]
1934 : : #if N_ARM
1935 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1936 : : break;
1937 : : #endif
1938 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1939 : 48 : ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
1940 : 48 : break;
1941 : : }
1942 : 12 : case VTYPE_PTR16: {
1943 : : // pointer to 16-bit memory
1944 : : #if N_ARM
1945 : : asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1946 : : break;
1947 : : #elif N_XTENSA || N_XTENSAWIN
1948 : : asm_xtensa_op_addx2(emit->as, REG_ARG_1, reg_index, REG_ARG_1);
1949 : : asm_xtensa_op_s16i(emit->as, reg_value, REG_ARG_1, 0);
1950 : : break;
1951 : : #endif
1952 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1953 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 2*index)
1954 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
1955 : 12 : break;
1956 : : }
1957 : 16 : case VTYPE_PTR32: {
1958 : : // pointer to 32-bit memory
1959 : : #if N_ARM
1960 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1961 : : break;
1962 : : #elif N_RV32
1963 : : asm_rv32_opcode_slli(emit->as, REG_TEMP2, reg_index, 2);
1964 : : asm_rv32_opcode_cadd(emit->as, REG_ARG_1, REG_TEMP2);
1965 : : asm_rv32_opcode_sw(emit->as, reg_value, REG_ARG_1, 0);
1966 : : break;
1967 : : #elif N_XTENSA || N_XTENSAWIN
1968 : : asm_xtensa_op_addx4(emit->as, REG_ARG_1, reg_index, REG_ARG_1);
1969 : : asm_xtensa_op_s32i_n(emit->as, reg_value, REG_ARG_1, 0);
1970 : : break;
1971 : : #endif
1972 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1973 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 2*index)
1974 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 3*index)
1975 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index again (base + 4*index)
1976 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
1977 : 16 : break;
1978 : : }
1979 : 4 : default:
1980 : 80 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1981 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1982 : : }
1983 : : }
1984 : :
1985 : : }
1986 : 1532 : }
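// Editorial sketch mirroring the load sketch earlier: every viper store case
// above reduces to one of these scaled writes once value, base and index are
// in registers.
#include <stdint.h>
static void sketch_viper_store(int ptr_width, uintptr_t base, intptr_t index, uint32_t value) {
    switch (ptr_width) {
        case 8:  *(uint8_t *)(base + index) = (uint8_t)value; break;        // VTYPE_PTR8
        case 16: *(uint16_t *)(base + 2 * index) = (uint16_t)value; break;  // VTYPE_PTR16
        default: *(uint32_t *)(base + 4 * index) = value; break;            // VTYPE_PTR32
    }
}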
1987 : :
1988 : 369 : static void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1989 [ + - ]: 369 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1990 : : // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
1991 : : // to mark deleted vars but then every var would need to be checked on
1992 : : // each access. Very inefficient, so just set value to None to enable GC.
1993 : 369 : emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
1994 : 369 : emit_native_store_fast(emit, qst, local_num);
1995 : : } else {
1996 : : // TODO implement me!
1997 : 369 : }
1998 : 369 : }
1999 : :
2000 : 360 : static void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
2001 : 360 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
2002 : 360 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
2003 : 360 : emit_native_pre(emit);
2004 : 360 : emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
2005 : 360 : emit_post(emit);
2006 : 360 : }
2007 : :
2008 : 48 : static void emit_native_delete_attr(emit_t *emit, qstr qst) {
2009 : 48 : vtype_kind_t vtype_base;
2010 : 48 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
2011 [ - + ]: 48 : assert(vtype_base == VTYPE_PYOBJ);
2012 : 48 : ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
2013 : 48 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
2014 : 48 : emit_post(emit);
2015 : 48 : }
2016 : :
2017 : 189 : static void emit_native_delete_subscr(emit_t *emit) {
2018 : 189 : vtype_kind_t vtype_index, vtype_base;
2019 : 189 : emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
2020 [ - + ]: 189 : assert(vtype_index == VTYPE_PYOBJ);
2021 [ - + ]: 189 : assert(vtype_base == VTYPE_PYOBJ);
2022 : 189 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
2023 : 189 : }
2024 : :
2025 : 5974 : static void emit_native_subscr(emit_t *emit, int kind) {
2026 [ + + ]: 5974 : if (kind == MP_EMIT_SUBSCR_LOAD) {
2027 : 4253 : emit_native_load_subscr(emit);
2028 [ + + ]: 1721 : } else if (kind == MP_EMIT_SUBSCR_STORE) {
2029 : 1532 : emit_native_store_subscr(emit);
2030 : : } else {
2031 : 189 : emit_native_delete_subscr(emit);
2032 : : }
2033 : 5974 : }
2034 : :
2035 : 14016 : static void emit_native_attr(emit_t *emit, qstr qst, int kind) {
2036 [ + + ]: 14016 : if (kind == MP_EMIT_ATTR_LOAD) {
2037 : 11133 : emit_native_load_attr(emit, qst);
2038 [ + + ]: 2883 : } else if (kind == MP_EMIT_ATTR_STORE) {
2039 : 2835 : emit_native_store_attr(emit, qst);
2040 : : } else {
2041 : 48 : emit_native_delete_attr(emit, qst);
2042 : : }
2043 : 14016 : }
2044 : :
2045 : 6714 : static void emit_native_dup_top(emit_t *emit) {
2046 : 6714 : DEBUG_printf("dup_top\n");
2047 : 6714 : vtype_kind_t vtype;
2048 : 6714 : int reg = REG_TEMP0;
2049 : 6714 : emit_pre_pop_reg_flexible(emit, &vtype, &reg, -1, -1);
2050 : 6714 : emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
2051 : 6714 : }
2052 : :
2053 : 456 : static void emit_native_dup_top_two(emit_t *emit) {
2054 : 456 : vtype_kind_t vtype0, vtype1;
2055 : 456 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
2056 : 456 : emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
2057 : 456 : }
2058 : :
2059 : 44257 : static void emit_native_pop_top(emit_t *emit) {
2060 : 44257 : DEBUG_printf("pop_top\n");
2061 : 44257 : emit_pre_pop_discard(emit);
2062 : 44257 : emit_post(emit);
2063 : 44257 : }
2064 : :
2065 : 894 : static void emit_native_rot_two(emit_t *emit) {
2066 : 894 : DEBUG_printf("rot_two\n");
2067 : 894 : vtype_kind_t vtype0, vtype1;
2068 : 894 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
2069 : 894 : emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
2070 : 894 : }
2071 : :
2072 : 339 : static void emit_native_rot_three(emit_t *emit) {
2073 : 339 : DEBUG_printf("rot_three\n");
2074 : 339 : vtype_kind_t vtype0, vtype1, vtype2;
2075 : 339 : emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
2076 : 339 : emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
2077 : 339 : }
2078 : :
2079 : 35675 : static void emit_native_jump(emit_t *emit, mp_uint_t label) {
2080 : 35675 : DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
2081 : 35675 : emit_native_pre(emit);
2082 : : // need to commit stack because we are jumping elsewhere
2083 : 35675 : need_stack_settled(emit);
2084 : 35675 : ASM_JUMP(emit->as, label);
2085 : 35675 : emit_post(emit);
2086 : 35675 : mp_asm_base_suppress_code(&emit->as->base);
2087 : 35675 : }
2088 : :
2089 : 12544 : static void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
2090 : 12544 : vtype_kind_t vtype = peek_vtype(emit, 0);
2091 [ + + ]: 12544 : if (vtype == VTYPE_PYOBJ) {
2092 : 12276 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2093 [ + + ]: 12276 : if (!pop) {
2094 : 279 : adjust_stack(emit, 1);
2095 : : }
2096 : 12276 : emit_call(emit, MP_F_OBJ_IS_TRUE);
2097 : : } else {
2098 : 268 : emit_pre_pop_reg(emit, &vtype, REG_RET);
2099 [ + + ]: 268 : if (!pop) {
2100 : 36 : adjust_stack(emit, 1);
2101 : : }
2102 [ + + ]: 268 : if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
2103 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2104 : : MP_ERROR_TEXT("can't implicitly convert '%q' to 'bool'"), vtype_to_qstr(vtype));
2105 : : }
2106 : : }
2107 : : // For the non-pop case we need to save the vtype so that
2108 : : // emit_native_adjust_stack_size can use it. This is a bit of a hack.
2109 [ + + ]: 12544 : if (!pop) {
2110 : 315 : emit->saved_stack_vtype = vtype;
2111 : : }
2112 : : // need to commit stack because we may jump elsewhere
2113 : 12544 : need_stack_settled(emit);
2114 : : // Emit the jump
2115 [ + + ]: 12544 : if (cond) {
2116 [ + + ]: 3318 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2117 : : } else {
2118 [ + + ]: 9226 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2119 : : }
2120 [ + + ]: 12544 : if (!pop) {
2121 : 315 : adjust_stack(emit, -1);
2122 : : }
2123 : 12544 : emit_post(emit);
2124 : 12544 : }
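// Editorial sketch of the condition the emitted branch tests: a Python object
// goes through the runtime truthiness helper, while a viper bool/int/uint is
// compared against zero directly. obj_is_true_ is a hypothetical stand-in for
// the MP_F_OBJ_IS_TRUE stub.
#include <stdint.h>
extern int obj_is_true_(void *obj);
static int sketch_branch_condition(int vtype_is_pyobj, void *obj, intptr_t raw_value) {
    return vtype_is_pyobj ? obj_is_true_(obj) : (raw_value != 0);
}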
2125 : :
2126 : 12229 : static void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
2127 : 12229 : DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
2128 : 12229 : emit_native_jump_helper(emit, cond, label, true);
2129 : 12229 : }
2130 : :
2131 : 315 : static void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
2132 : 315 : DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
2133 : 315 : emit_native_jump_helper(emit, cond, label, false);
2134 : 315 : }
2135 : :
2136 : 19208 : static void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
2137 [ + + ]: 19208 : if (except_depth > 0) {
2138 : 375 : exc_stack_entry_t *first_finally = NULL;
2139 : 375 : exc_stack_entry_t *prev_finally = NULL;
2140 : 375 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
2141 [ + + ]: 1095 : for (; except_depth > 0; --except_depth, --e) {
2142 [ + + + + ]: 720 : if (e->is_finally && e->is_active) {
2143 : : // Found an active finally handler
2144 [ + + ]: 333 : if (first_finally == NULL) {
2145 : 285 : first_finally = e;
2146 : : }
2147 [ + + ]: 333 : if (prev_finally != NULL) {
2148 : : // Mark prev finally as needed to unwind a jump
2149 : 48 : prev_finally->unwind_label = e->label;
2150 : : }
2151 : : prev_finally = e;
2152 : : }
2153 : : }
2154 [ + + ]: 375 : if (prev_finally == NULL) {
2155 : : // No finally, handle the jump ourselves
2156 : : // First, restore the exception handler address for the jump
2157 [ + - ]: 90 : if (e < emit->exc_stack) {
2158 : 90 : ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
2159 : : } else {
2160 : 0 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
2161 : : }
2162 : 90 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
2163 : : } else {
2164 : : // Last finally should do our jump for us
2165 : : // Mark finally as needing to decide the type of jump
2166 : 285 : prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
2167 : 285 : ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
2168 : 285 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
2169 : : // Cancel any active exception (see also emit_native_pop_except_jump)
2170 : 285 : ASM_MOV_REG_IMM(emit->as, REG_RET, (mp_uint_t)MP_OBJ_NULL);
2171 : 285 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
2172 : : // Jump to the innermost active finally
2173 : 285 : label = first_finally->label;
2174 : : }
2175 : : }
2176 : 19208 : emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
2177 : 19208 : }
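// Editorial sketch of the unwind-jump policy above, with simplified types:
// walk the handlers from innermost to outermost; active finallys are chained
// together via unwind_label, and the jump either goes straight to its target
// (no finally in range) or is delegated to the innermost finally. The real
// code additionally fixes up the handler-PC and unwind slots as shown above.
struct sketch_exc_entry {
    int is_finally, is_active;
    unsigned label, unwind_label;
};
static unsigned sketch_unwind_jump(struct sketch_exc_entry *innermost, unsigned depth, unsigned target) {
    struct sketch_exc_entry *first_finally = NULL, *prev_finally = NULL;
    for (struct sketch_exc_entry *e = innermost; depth > 0; --depth, --e) {
        if (e->is_finally && e->is_active) {
            if (first_finally == NULL) {
                first_finally = e;                     // innermost active finally
            }
            if (prev_finally != NULL) {
                prev_finally->unwind_label = e->label; // chain inner finally to outer one
            }
            prev_finally = e;
        }
    }
    return prev_finally == NULL ? target : first_finally->label;
}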
2178 : :
2179 : 324 : static void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
2180 : : // the context manager is on the top of the stack
2181 : : // stack: (..., ctx_mgr)
2182 : :
2183 : : // get __exit__ method
2184 : 324 : vtype_kind_t vtype;
2185 : 324 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
2186 [ - + ]: 324 : assert(vtype == VTYPE_PYOBJ);
2187 : 324 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2188 : 324 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
2189 : : // stack: (..., ctx_mgr, __exit__, self)
2190 : :
2191 : 324 : emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
2192 : 324 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
2193 : 324 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
2194 : 324 : emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
2195 : 324 : emit_post_push_reg(emit, vtype, REG_ARG_3); // self
2196 : : // stack: (..., __exit__, self)
2197 : : // REG_ARG_1=ctx_mgr
2198 : :
2199 : : // get __enter__ method
2200 : 324 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2201 : 324 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
2202 : : // stack: (..., __exit__, self, __enter__, self)
2203 : :
2204 : : // call __enter__ method
2205 : 324 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
2206 : 324 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
2207 : 324 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
2208 : : // stack: (..., __exit__, self, as_value)
2209 : :
2210 : : // need to commit stack because we may jump elsewhere
2211 : 324 : need_stack_settled(emit);
2212 : 324 : emit_native_push_exc_stack(emit, label, true);
2213 : :
2214 : 324 : emit_native_dup_top(emit);
2215 : : // stack: (..., __exit__, self, as_value, as_value)
2216 : 324 : }
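// Editorial sketch of the run-time effect of the sequence above, written with
// the public runtime helpers that back the MP_F_ stubs (mp_load_method,
// mp_call_method_n_kw); the emitted code does the same work but keeps the
// operands on the value stack in the order shown in the comments.
static void sketch_setup_with(mp_obj_t ctx_mgr, mp_obj_t stack[4]) {
    mp_load_method(ctx_mgr, MP_QSTR___exit__, &stack[0]); // stack: __exit__, self
    mp_obj_t enter[2];
    mp_load_method(ctx_mgr, MP_QSTR___enter__, enter);
    stack[2] = mp_call_method_n_kw(0, 0, enter);          // as_value = ctx_mgr.__enter__()
    stack[3] = stack[2];                                  // dup_top for the body's target
}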
2217 : :
2218 : 6459 : static void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
2219 : 6459 : DEBUG_printf("setup_block(%d, %d)\n", (int)label, kind);
2220 [ + + ]: 6459 : if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
2221 : 324 : emit_native_setup_with(emit, label);
2222 : : } else {
2223 : : // Set up except and finally
2224 : 6135 : emit_native_pre(emit);
2225 : 6135 : need_stack_settled(emit);
2226 : 6135 : emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
2227 : 6135 : emit_post(emit);
2228 : : }
2229 : 6459 : }
2230 : :
2231 : 324 : static void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
2232 : : // Note: 3 labels are reserved for this function, starting at *emit->label_slot
2233 : :
2234 : : // stack: (..., __exit__, self, as_value)
2235 : 324 : emit_native_pre(emit);
2236 : 324 : emit_native_leave_exc_stack(emit, false);
2237 : 324 : adjust_stack(emit, -1);
2238 : : // stack: (..., __exit__, self)
2239 : :
2240 : : // Label for case where __exit__ is called from an unwind jump
2241 : 324 : emit_native_label_assign(emit, *emit->label_slot + 2);
2242 : :
2243 : : // call __exit__
2244 : 324 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2245 : 324 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2246 : 324 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2247 : 324 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2248 : 324 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2249 : :
2250 : : // Replace exc with None and finish
2251 : 324 : emit_native_jump(emit, *emit->label_slot);
2252 : :
2253 : : // nlr_catch
2254 : : // Don't use emit_native_label_assign because this isn't a real finally label
2255 : 324 : mp_asm_base_label_assign(&emit->as->base, label);
2256 : :
2257 : : // Leave with's exception handler
2258 : 324 : emit_native_leave_exc_stack(emit, true);
2259 : :
2260 : : // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
2261 : 324 : emit_native_adjust_stack_size(emit, 2);
2262 : : // stack: (..., __exit__, self)
2263 : :
2264 : 324 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc
2265 : :
2266 : : // Check if exc is MP_OBJ_NULL (i.e. zero) and jump to non-exc handler if it is
2267 : 324 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, *emit->label_slot + 2, false);
2268 : :
2269 : 324 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
2270 : 324 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
2271 : 324 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
2272 : 324 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
2273 : : // Stack: (..., __exit__, self, type(exc), exc, traceback)
2274 : :
2275 : : // call __exit__ method
2276 : 324 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2277 : 324 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2278 : : // Stack: (...)
2279 : :
2280 : : // If REG_RET is true then we need to replace exception with None (swallow exception)
2281 : 324 : if (REG_ARG_1 != REG_RET) {
2282 : 324 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2283 : : }
2284 : 324 : emit_call(emit, MP_F_OBJ_IS_TRUE);
2285 : 324 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
2286 : :
2287 : : // Replace exception with MP_OBJ_NULL.
2288 : 324 : emit_native_label_assign(emit, *emit->label_slot);
2289 : 324 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2290 : 324 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2291 : :
2292 : : // end of with cleanup nlr_catch block
2293 : 324 : emit_native_label_assign(emit, *emit->label_slot + 1);
2294 : :
2295 : : // Exception is in nlr_buf.ret_val slot
2296 : 324 : adjust_stack(emit, 1);
2297 : 324 : }
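// Editorial sketch of the cleanup path above: on normal exit __exit__ gets
// three null/none arguments; on an exception it gets (type(exc), exc, tb),
// and a truthy result swallows the exception by nulling the exception slot.
// The traceback argument is a placeholder here, matching the null pushed above.
static void sketch_with_cleanup(mp_obj_t exit_meth[2], mp_obj_t exc) {
    mp_obj_t args[5] = { exit_meth[0], exit_meth[1], mp_const_none, mp_const_none, mp_const_none };
    if (exc != MP_OBJ_NULL) {
        args[2] = MP_OBJ_FROM_PTR(mp_obj_get_type(exc)); // type(exc)
        args[3] = exc;                                   // the exception value
    }
    mp_obj_t ret = mp_call_method_n_kw(3, 0, args);      // call __exit__
    if (exc != MP_OBJ_NULL && mp_obj_is_true(ret)) {
        exc = MP_OBJ_NULL;                               // swallow the exception
    }
    (void)exc;
}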
2298 : :
2299 : : #if MICROPY_PY_ASYNC_AWAIT
2300 : 42 : static void emit_native_async_with_setup_finally(emit_t *emit, mp_uint_t label_aexit_no_exc, mp_uint_t label_finally_block, mp_uint_t label_ret_unwind_jump) {
2301 : : // The async-with body has executed and no exception was raised, the execution fell through to this point.
2302 : : // Stack: (..., ctx_mgr)
2303 : :
2304 : : // Insert a dummy value onto the stack so it has the same layout that the code starting at label_aexit_no_exc expects
2305 : 42 : emit_native_adjust_stack_size(emit, 1); // push dummy value, it won't ever be used
2306 : 42 : emit_native_rot_two(emit);
2307 : 42 : emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE); // to tell end_finally there's no exception
2308 : 42 : emit_native_rot_two(emit);
2309 : : // Stack: (..., <dummy>, None, ctx_mgr)
2310 : 42 : emit_native_jump(emit, label_aexit_no_exc); // jump to code to call __aexit__
2311 : 42 : emit_native_adjust_stack_size(emit, -1);
2312 : :
2313 : : // Start of "finally" block which is entered via one of: an exception propagating out, a return, an unwind jump.
2314 : 42 : emit_native_label_assign(emit, label_finally_block);
2315 : :
2316 : : // Detect which case we have by checking whether the local exception slot holds an exception.
2317 : 42 : emit_pre_pop_discard(emit);
2318 : 42 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exception
2319 : 42 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1);
2320 : 42 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, label_ret_unwind_jump, false); // if not an exception then we have return or unwind jump.
2321 : 42 : }
2322 : : #endif
2323 : :
2324 : 6459 : static void emit_native_end_finally(emit_t *emit) {
2325 : : // logic:
2326 : : // exc = pop_stack
2327 : : // if exc == None: pass
2328 : : // else: raise exc
2329 : : // the check if exc is None is done in the MP_F_NATIVE_RAISE stub
2330 : 6459 : DEBUG_printf("end_finally\n");
2331 : :
2332 : 6459 : emit_pre_pop_discard(emit);
2333 : 6459 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
2334 : 6459 : emit_call(emit, MP_F_NATIVE_RAISE);
2335 : :
2336 : : // Get state for this finally and see if we need to unwind
2337 : 6459 : exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
2338 [ + + ]: 6459 : if (e->unwind_label != UNWIND_LABEL_UNUSED) {
2339 : 330 : ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
2340 : 330 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
2341 [ + + ]: 330 : if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
2342 : 282 : ASM_JUMP_REG(emit->as, REG_RET);
2343 : : } else {
2344 : 48 : emit_native_jump(emit, e->unwind_label);
2345 : : }
2346 : 330 : emit_native_label_assign(emit, *emit->label_slot);
2347 : : }
2348 : :
2349 : 6459 : emit_post(emit);
2350 : 6459 : }
2351 : :
2352 : 2425 : static void emit_native_get_iter(emit_t *emit, bool use_stack) {
2353 : : // This is perhaps the difficult one: we want to rewrite for loops using native code,
2354 : : // but when iterating over a Python object, can we still use normal runtime calls?
2355 : :
2356 : 2425 : DEBUG_printf("get_iter(%d)\n", use_stack);
2357 : :
2358 : 2425 : vtype_kind_t vtype;
2359 : 2425 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2360 [ - + ]: 2425 : assert(vtype == VTYPE_PYOBJ);
2361 [ + + ]: 2425 : if (use_stack) {
2362 : 1377 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
2363 : 1377 : emit_call(emit, MP_F_NATIVE_GETITER);
2364 : : } else {
2365 : : // mp_getiter will allocate the iter_buf on the heap
2366 : 1048 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
2367 : 1048 : emit_call(emit, MP_F_NATIVE_GETITER);
2368 : 1048 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2369 : : }
2370 : 2425 : }
2371 : :
2372 : 1497 : static void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
2373 : 1497 : emit_native_pre(emit);
2374 : 1497 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
2375 : 1497 : adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
2376 : 1497 : emit_call(emit, MP_F_NATIVE_ITERNEXT);
2377 : : #if MICROPY_DEBUG_MP_OBJ_SENTINELS
2378 : : ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
2379 : : ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
2380 : : #else
2381 : 1497 : MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
2382 : 1497 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
2383 : : #endif
2384 : 1497 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2385 : 1497 : }
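// Editorial note: exhaustion is signalled by the iternext helper returning
// MP_OBJ_STOP_ITERATION; the static assert above guarantees that in the
// non-sentinel build this is the value 0, so a single jump-if-zero suffices.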
2386 : :
2387 : 1497 : static void emit_native_for_iter_end(emit_t *emit) {
2388 : : // adjust stack counter (we get here from for_iter ending, which popped the value for us)
2389 : 1497 : emit_native_pre(emit);
2390 : 1497 : adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
2391 : 1497 : emit_post(emit);
2392 : 1497 : }
2393 : :
2394 : 10011 : static void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
2395 [ + + ]: 10011 : if (within_exc_handler) {
2396 : : // Cancel any active exception so subsequent handlers don't see it
2397 : 5028 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2398 : 5028 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2399 : : } else {
2400 : 4983 : emit_native_leave_exc_stack(emit, false);
2401 : : }
2402 : 10011 : emit_native_jump(emit, label);
2403 : 10011 : }
2404 : :
2405 : 1304 : static void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
2406 : 1304 : vtype_kind_t vtype = peek_vtype(emit, 0);
2407 [ + + ]: 1304 : if (vtype == VTYPE_INT || vtype == VTYPE_UINT) {
2408 [ + + ]: 22 : if (op == MP_UNARY_OP_POSITIVE) {
2409 : : // No-operation, just leave the argument on the stack.
2410 [ + + ]: 16 : } else if (op == MP_UNARY_OP_NEGATIVE) {
2411 : 6 : int reg = REG_RET;
2412 : 6 : emit_pre_pop_reg_flexible(emit, &vtype, ®, reg, reg);
2413 : 6 : ASM_NEG_REG(emit->as, reg);
2414 : 6 : emit_post_push_reg(emit, vtype, reg);
2415 [ + + ]: 10 : } else if (op == MP_UNARY_OP_INVERT) {
2416 : : #ifdef ASM_NOT_REG
2417 : 6 : int reg = REG_RET;
2418 : 6 : emit_pre_pop_reg_flexible(emit, &vtype, ®, reg, reg);
2419 : 6 : ASM_NOT_REG(emit->as, reg);
2420 : : #else
2421 : : int reg = REG_RET;
2422 : : emit_pre_pop_reg_flexible(emit, &vtype, ®, REG_ARG_1, reg);
2423 : : ASM_MOV_REG_IMM(emit->as, REG_ARG_1, -1);
2424 : : ASM_XOR_REG_REG(emit->as, reg, REG_ARG_1);
2425 : : #endif
2426 : 6 : emit_post_push_reg(emit, vtype, reg);
2427 : : } else {
2428 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2429 : : MP_ERROR_TEXT("'not' not implemented"), mp_binary_op_method_name[op]);
2430 : : }
2431 [ + + ]: 1282 : } else if (vtype == VTYPE_PYOBJ) {
2432 : 1278 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
2433 : 1278 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
2434 : 1278 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2435 : : } else {
2436 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2437 : : MP_ERROR_TEXT("can't do unary op of '%q'"), vtype_to_qstr(vtype));
2438 : : }
2439 : 1304 : }
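// Editorial note: the #else branch above uses the two's-complement identity
// ~x == x ^ -1 for targets without a native NOT instruction, e.g.:
//
//     static void invert_identity_check(int x) {
//         assert(~x == (x ^ -1));  // holds for all x on two's-complement targets
//     }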
2440 : :
2441 : 20378 : static void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
2442 : 20378 : DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
2443 : 20378 : vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
2444 : 20378 : vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
2445 [ + + ]: 20378 : if ((vtype_lhs == VTYPE_INT || vtype_lhs == VTYPE_UINT)
2446 [ + + ]: 878 : && (vtype_rhs == VTYPE_INT || vtype_rhs == VTYPE_UINT)) {
2447 : : // for integers, inplace and normal ops are equivalent, so just use the normal ops
2448 [ + + ]: 874 : if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
2449 : 186 : op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
2450 : : }
2451 : :
2452 : : #if N_X64 || N_X86
2453 : : // special cases for x86 and shifting
2454 [ + + ]: 874 : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2455 : : #if N_X64
2456 : 96 : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
2457 : : #else
2458 : : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
2459 : : #endif
2460 [ + + ]: 96 : if (op == MP_BINARY_OP_LSHIFT) {
2461 : 48 : ASM_LSL_REG(emit->as, REG_RET);
2462 : : } else {
2463 [ + + ]: 48 : if (vtype_lhs == VTYPE_UINT) {
2464 : 6 : ASM_LSR_REG(emit->as, REG_RET);
2465 : : } else {
2466 : 42 : ASM_ASR_REG(emit->as, REG_RET);
2467 : : }
2468 : : }
2469 : 96 : emit_post_push_reg(emit, vtype_lhs, REG_RET);
2470 : 212 : return;
2471 : : }
2472 : : #endif
2473 : :
2474 : : // special cases for floor-divide and modulo because we dispatch to helper functions
2475 [ + + ]: 778 : if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
2476 : 20 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
2477 [ + + ]: 20 : if (vtype_lhs != VTYPE_INT) {
2478 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2479 : : MP_ERROR_TEXT("div/mod not implemented for uint"), mp_binary_op_method_name[op]);
2480 : : }
2481 [ + + ]: 20 : if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
2482 : 10 : emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
2483 : : } else {
2484 : 10 : emit_call(emit, MP_F_SMALL_INT_MODULO);
2485 : : }
2486 : 20 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
2487 : 20 : return;
2488 : : }
2489 : :
2490 : 758 : int reg_rhs = REG_ARG_3;
2491 : 758 : emit_pre_pop_reg_flexible(emit, &vtype_rhs, ®_rhs, REG_RET, REG_ARG_2);
2492 : 758 : emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
2493 : :
2494 : : #if !(N_X64 || N_X86)
2495 : : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2496 : : if (op == MP_BINARY_OP_LSHIFT) {
2497 : : ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2498 : : } else {
2499 : : if (vtype_lhs == VTYPE_UINT) {
2500 : : ASM_LSR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2501 : : } else {
2502 : : ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2503 : : }
2504 : : }
2505 : : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2506 : : return;
2507 : : }
2508 : : #endif
2509 : :
2510 [ + + + + : 758 : if (op == MP_BINARY_OP_OR) {
+ + + + ]
2511 : 60 : ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2512 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2513 : : } else if (op == MP_BINARY_OP_XOR) {
2514 : 24 : ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2515 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2516 : : } else if (op == MP_BINARY_OP_AND) {
2517 : 60 : ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2518 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2519 : : } else if (op == MP_BINARY_OP_ADD) {
2520 : 294 : ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2521 : 294 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2522 : : } else if (op == MP_BINARY_OP_SUBTRACT) {
2523 : 24 : ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2524 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2525 : : } else if (op == MP_BINARY_OP_MULTIPLY) {
2526 : 24 : ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2527 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2528 : : } else if (op == MP_BINARY_OP_LESS
2529 : : || op == MP_BINARY_OP_MORE
2530 : : || op == MP_BINARY_OP_EQUAL
2531 : : || op == MP_BINARY_OP_LESS_EQUAL
2532 : : || op == MP_BINARY_OP_MORE_EQUAL
2533 : : || op == MP_BINARY_OP_NOT_EQUAL) {
2534 : : // comparison ops
2535 : :
2536 [ + + ]: 268 : if (vtype_lhs != vtype_rhs) {
2537 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("comparison of int and uint"));
2538 : : }
2539 : :
2540 [ + + ]: 268 : size_t op_idx = op - MP_BINARY_OP_LESS + (vtype_lhs == VTYPE_UINT ? 0 : 6);
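// The 12-entry condition tables below are indexed by op_idx: entries 0..5 are
// the unsigned codes for LESS, MORE, EQUAL, LESS_EQUAL, MORE_EQUAL, NOT_EQUAL
// (in mp_binary_op_t order), and +6 selects the signed variants for VTYPE_INT.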
2541 : :
2542 : 268 : need_reg_single(emit, REG_RET, 0);
2543 : : #if N_X64
2544 : 268 : asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
2545 : 268 : asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
2546 : 268 : static const byte ops[6 + 6] = {
2547 : : // unsigned
2548 : : ASM_X64_CC_JB,
2549 : : ASM_X64_CC_JA,
2550 : : ASM_X64_CC_JE,
2551 : : ASM_X64_CC_JBE,
2552 : : ASM_X64_CC_JAE,
2553 : : ASM_X64_CC_JNE,
2554 : : // signed
2555 : : ASM_X64_CC_JL,
2556 : : ASM_X64_CC_JG,
2557 : : ASM_X64_CC_JE,
2558 : : ASM_X64_CC_JLE,
2559 : : ASM_X64_CC_JGE,
2560 : : ASM_X64_CC_JNE,
2561 : : };
2562 : 268 : asm_x64_setcc_r8(emit->as, ops[op_idx], REG_RET);
2563 : : #elif N_X86
2564 : : asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
2565 : : asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
2566 : : static const byte ops[6 + 6] = {
2567 : : // unsigned
2568 : : ASM_X86_CC_JB,
2569 : : ASM_X86_CC_JA,
2570 : : ASM_X86_CC_JE,
2571 : : ASM_X86_CC_JBE,
2572 : : ASM_X86_CC_JAE,
2573 : : ASM_X86_CC_JNE,
2574 : : // signed
2575 : : ASM_X86_CC_JL,
2576 : : ASM_X86_CC_JG,
2577 : : ASM_X86_CC_JE,
2578 : : ASM_X86_CC_JLE,
2579 : : ASM_X86_CC_JGE,
2580 : : ASM_X86_CC_JNE,
2581 : : };
2582 : : asm_x86_setcc_r8(emit->as, ops[op_idx], REG_RET);
2583 : : #elif N_THUMB
2584 : : asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
2585 : : if (asm_thumb_allow_armv7m(emit->as)) {
2586 : : static const uint16_t ops[6 + 6] = {
2587 : : // unsigned
2588 : : ASM_THUMB_OP_ITE_CC,
2589 : : ASM_THUMB_OP_ITE_HI,
2590 : : ASM_THUMB_OP_ITE_EQ,
2591 : : ASM_THUMB_OP_ITE_LS,
2592 : : ASM_THUMB_OP_ITE_CS,
2593 : : ASM_THUMB_OP_ITE_NE,
2594 : : // signed
2595 : : ASM_THUMB_OP_ITE_LT,
2596 : : ASM_THUMB_OP_ITE_GT,
2597 : : ASM_THUMB_OP_ITE_EQ,
2598 : : ASM_THUMB_OP_ITE_LE,
2599 : : ASM_THUMB_OP_ITE_GE,
2600 : : ASM_THUMB_OP_ITE_NE,
2601 : : };
2602 : : asm_thumb_op16(emit->as, ops[op_idx]);
2603 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2604 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2605 : : } else {
2606 : : static const uint16_t ops[6 + 6] = {
2607 : : // unsigned
2608 : : ASM_THUMB_CC_CC,
2609 : : ASM_THUMB_CC_HI,
2610 : : ASM_THUMB_CC_EQ,
2611 : : ASM_THUMB_CC_LS,
2612 : : ASM_THUMB_CC_CS,
2613 : : ASM_THUMB_CC_NE,
2614 : : // signed
2615 : : ASM_THUMB_CC_LT,
2616 : : ASM_THUMB_CC_GT,
2617 : : ASM_THUMB_CC_EQ,
2618 : : ASM_THUMB_CC_LE,
2619 : : ASM_THUMB_CC_GE,
2620 : : ASM_THUMB_CC_NE,
2621 : : };
2622 : : asm_thumb_bcc_rel9(emit->as, ops[op_idx], 6);
2623 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2624 : : asm_thumb_b_rel12(emit->as, 4);
2625 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2626 : : }
2627 : : #elif N_ARM
2628 : : asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
2629 : : static const uint ccs[6 + 6] = {
2630 : : // unsigned
2631 : : ASM_ARM_CC_CC,
2632 : : ASM_ARM_CC_HI,
2633 : : ASM_ARM_CC_EQ,
2634 : : ASM_ARM_CC_LS,
2635 : : ASM_ARM_CC_CS,
2636 : : ASM_ARM_CC_NE,
2637 : : // signed
2638 : : ASM_ARM_CC_LT,
2639 : : ASM_ARM_CC_GT,
2640 : : ASM_ARM_CC_EQ,
2641 : : ASM_ARM_CC_LE,
2642 : : ASM_ARM_CC_GE,
2643 : : ASM_ARM_CC_NE,
2644 : : };
2645 : : asm_arm_setcc_reg(emit->as, REG_RET, ccs[op_idx]);
2646 : : #elif N_XTENSA || N_XTENSAWIN
2647 : : static const uint8_t ccs[6 + 6] = {
2648 : : // unsigned
2649 : : ASM_XTENSA_CC_LTU,
2650 : : 0x80 | ASM_XTENSA_CC_LTU, // for GTU we'll swap args
2651 : : ASM_XTENSA_CC_EQ,
2652 : : 0x80 | ASM_XTENSA_CC_GEU, // for LEU we'll swap args
2653 : : ASM_XTENSA_CC_GEU,
2654 : : ASM_XTENSA_CC_NE,
2655 : : // signed
2656 : : ASM_XTENSA_CC_LT,
2657 : : 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
2658 : : ASM_XTENSA_CC_EQ,
2659 : : 0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
2660 : : ASM_XTENSA_CC_GE,
2661 : : ASM_XTENSA_CC_NE,
2662 : : };
2663 : : uint8_t cc = ccs[op_idx];
2664 : : if ((cc & 0x80) == 0) {
2665 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
2666 : : } else {
2667 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
2668 : : }
2669 : : #elif N_RV32
2670 : : (void)op_idx;
2671 : : switch (op) {
2672 : : case MP_BINARY_OP_LESS:
2673 : : asm_rv32_meta_comparison_lt(emit->as, REG_ARG_2, reg_rhs, REG_RET, vtype_lhs == VTYPE_UINT);
2674 : : break;
2675 : :
2676 : : case MP_BINARY_OP_MORE:
2677 : : asm_rv32_meta_comparison_lt(emit->as, reg_rhs, REG_ARG_2, REG_RET, vtype_lhs == VTYPE_UINT);
2678 : : break;
2679 : :
2680 : : case MP_BINARY_OP_EQUAL:
2681 : : asm_rv32_meta_comparison_eq(emit->as, REG_ARG_2, reg_rhs, REG_RET);
2682 : : break;
2683 : :
2684 : : case MP_BINARY_OP_LESS_EQUAL:
2685 : : asm_rv32_meta_comparison_le(emit->as, REG_ARG_2, reg_rhs, REG_RET, vtype_lhs == VTYPE_UINT);
2686 : : break;
2687 : :
2688 : : case MP_BINARY_OP_MORE_EQUAL:
2689 : : asm_rv32_meta_comparison_le(emit->as, reg_rhs, REG_ARG_2, REG_RET, vtype_lhs == VTYPE_UINT);
2690 : : break;
2691 : :
2692 : : case MP_BINARY_OP_NOT_EQUAL:
2693 : : asm_rv32_meta_comparison_ne(emit->as, reg_rhs, REG_ARG_2, REG_RET);
2694 : : break;
2695 : :
2696 : : default:
2697 : : break;
2698 : : }
2699 : : #elif N_DEBUG
2700 : : asm_debug_setcc_reg_reg_reg(emit->as, op_idx, REG_RET, REG_ARG_2, reg_rhs);
2701 : : #else
2702 : : #error not implemented
2703 : : #endif
2704 : 268 : emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
2705 : : } else {
2706 : : // TODO other ops not yet implemented
2707 : 4 : adjust_stack(emit, 1);
2708 : 758 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2709 : : MP_ERROR_TEXT("binary op %q not implemented"), mp_binary_op_method_name[op]);
2710 : : }
2711 [ + + + - ]: 39004 : } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
2712 : 19500 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
2713 : 19500 : bool invert = false;
2714 [ + + ]: 19500 : if (op == MP_BINARY_OP_NOT_IN) {
2715 : : invert = true;
2716 : : op = MP_BINARY_OP_IN;
2717 [ + + ]: 19344 : } else if (op == MP_BINARY_OP_IS_NOT) {
2718 : 756 : invert = true;
2719 : 756 : op = MP_BINARY_OP_IS;
2720 : : }
2721 : 19500 : emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
2722 [ + + ]: 19500 : if (invert) {
2723 : 912 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
2724 : 912 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
2725 : : }
2726 : 19500 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2727 : : } else {
2728 : 4 : adjust_stack(emit, -1);
2729 : 20262 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2730 : : MP_ERROR_TEXT("can't do binary op between '%q' and '%q'"),
2731 : : vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
2732 : : }
2733 : : }
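// Editorial note: a hedged viper example of the native int paths above
// (exact codegen depends on the target architecture):
//
//     @micropython.viper
//     def f(x: int, y: int) -> int:
//         return (x + y) >> 1    # ADD, then the signed-shift (ASR) path
//
// Comparisons between two native ints push a VTYPE_BOOL result, and mixing
// int with uint in a comparison triggers the viper type error emitted above.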
2734 : :
2735 : : #if MICROPY_PY_BUILTINS_SLICE
2736 : : static void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
2737 : : #endif
2738 : :
2739 : 8313 : static void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
2740 : : // for viper: call the runtime with the types of the args;
2741 : : // if wrapped in bytearray or similar, it allocates memory and fills it
2742 : 8313 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
2743 : 8313 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
2744 : 8313 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
2745 : 8313 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
2746 : : #if MICROPY_PY_BUILTINS_SLICE
2747 [ + + ]: 8313 : if (kind == MP_EMIT_BUILD_SLICE) {
2748 : 924 : emit_native_build_slice(emit, n_args);
2749 : 924 : return;
2750 : : }
2751 : : #endif
2752 : 7389 : emit_native_pre(emit);
2753 [ + + ]: 7389 : if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
2754 : 6204 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2755 : : }
2756 : 7389 : emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
2757 : 7389 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
2758 : : }
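// Editorial note: the static asserts above pin the MP_F_BUILD_* runtime
// entries to the MP_EMIT_BUILD_* kinds so that "MP_F_BUILD_TUPLE + kind" is a
// direct table dispatch.  The general pattern, with hypothetical enums:
//
//     enum { FUN_TUPLE, FUN_LIST, FUN_MAP };     // runtime function indices
//     enum { KIND_TUPLE, KIND_LIST, KIND_MAP };  // emitter kinds, same order
//     MP_STATIC_ASSERT(FUN_TUPLE + KIND_LIST == FUN_LIST);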
2759 : :
2760 : 2223 : static void emit_native_store_map(emit_t *emit) {
2761 : 2223 : vtype_kind_t vtype_key, vtype_value, vtype_map;
2762 : 2223 : emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
2763 [ - + ]: 2223 : assert(vtype_key == VTYPE_PYOBJ);
2764 [ - + ]: 2223 : assert(vtype_value == VTYPE_PYOBJ);
2765 [ - + ]: 2223 : assert(vtype_map == VTYPE_PYOBJ);
2766 : 2223 : emit_call(emit, MP_F_STORE_MAP);
2767 : 2223 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
2768 : 2223 : }
2769 : :
2770 : : #if MICROPY_PY_BUILTINS_SLICE
2771 : 924 : static void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
2772 : 924 : DEBUG_printf("build_slice %d\n", n_args);
2773 [ + + ]: 924 : if (n_args == 2) {
2774 : 753 : vtype_kind_t vtype_start, vtype_stop;
2775 : 753 : emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
2776 [ - + ]: 753 : assert(vtype_start == VTYPE_PYOBJ);
2777 [ - + ]: 753 : assert(vtype_stop == VTYPE_PYOBJ);
2778 : 753 : emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
2779 : : } else {
2780 [ - + ]: 171 : assert(n_args == 3);
2781 : 171 : vtype_kind_t vtype_start, vtype_stop, vtype_step;
2782 : 171 : emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
2783 [ - + ]: 171 : assert(vtype_start == VTYPE_PYOBJ);
2784 [ - + ]: 171 : assert(vtype_stop == VTYPE_PYOBJ);
2785 [ - + ]: 171 : assert(vtype_step == VTYPE_PYOBJ);
2786 : : }
2787 : 924 : emit_call(emit, MP_F_NEW_SLICE);
2788 : 924 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2789 : 924 : }
2790 : : #endif
2791 : :
2792 : 195 : static void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
2793 : 195 : mp_fun_kind_t f;
2794 [ + + ]: 195 : if (kind == SCOPE_LIST_COMP) {
2795 : 168 : vtype_kind_t vtype_item;
2796 : 168 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2797 [ - + ]: 168 : assert(vtype_item == VTYPE_PYOBJ);
2798 : 168 : f = MP_F_LIST_APPEND;
2799 : : #if MICROPY_PY_BUILTINS_SET
2800 [ + + ]: 27 : } else if (kind == SCOPE_SET_COMP) {
2801 : 3 : vtype_kind_t vtype_item;
2802 : 3 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2803 [ - + ]: 3 : assert(vtype_item == VTYPE_PYOBJ);
2804 : 3 : f = MP_F_STORE_SET;
2805 : : #endif
2806 : : } else {
2807 : : // SCOPE_DICT_COMP
2808 : 24 : vtype_kind_t vtype_key, vtype_value;
2809 : 24 : emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2810 [ - + ]: 24 : assert(vtype_key == VTYPE_PYOBJ);
2811 [ - + ]: 24 : assert(vtype_value == VTYPE_PYOBJ);
2812 : 24 : f = MP_F_STORE_MAP;
2813 : : }
2814 : 195 : vtype_kind_t vtype_collection;
2815 : 195 : emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2816 [ - + ]: 195 : assert(vtype_collection == VTYPE_PYOBJ);
2817 : 195 : emit_call(emit, f);
2818 : 195 : emit_post(emit);
2819 : 195 : }
2820 : :
2821 : 486 : static void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
2822 : 486 : DEBUG_printf("unpack_sequence %d\n", n_args);
2823 : 486 : vtype_kind_t vtype_base;
2824 : 486 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2825 [ - + ]: 486 : assert(vtype_base == VTYPE_PYOBJ);
2826 : 486 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
2827 : 486 : emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
2828 : 486 : }
2829 : :
2830 : 99 : static void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
2831 : 99 : DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
2832 : 99 : vtype_kind_t vtype_base;
2833 : 99 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2834 [ - + ]: 99 : assert(vtype_base == VTYPE_PYOBJ);
2835 : 99 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
2836 : 99 : emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
2837 : 99 : }
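// Editorial sketch: how the packed immediate above can be decoded by the
// callee (helper name is illustrative, not the real runtime code):
//
//     static void unpack_ex_counts(mp_uint_t arg, size_t *n_left, size_t *n_right) {
//         *n_left = arg & 0xff;
//         *n_right = (arg >> 8) & 0xff;
//     }
//
// For example "a, *b, c, d = seq" packs n_left == 1 and n_right == 2.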
2838 : :
2839 : 9939 : static void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2840 : : // call the runtime with type info for the args; alternatively don't support dict/default params, or support only Python objects for them
2841 : 9939 : emit_native_pre(emit);
2842 : 9939 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2843 : 9939 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2844 [ + + ]: 9939 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2845 : 9159 : need_reg_all(emit);
2846 : 9159 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2847 : : } else {
2848 : 780 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2);
2849 : 780 : need_reg_all(emit);
2850 : : }
2851 : 9939 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2852 : 9939 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_PROTO_FUN);
2853 : 9939 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2854 : 9939 : }
2855 : :
2856 : 285 : static void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2857 : : // make function
2858 : 285 : emit_native_pre(emit);
2859 : 285 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2860 : 285 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2861 [ + + ]: 285 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2862 : 282 : need_reg_all(emit);
2863 : 282 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2864 : : } else {
2865 : 3 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_closed_over);
2866 : 3 : adjust_stack(emit, 2 + n_closed_over);
2867 : 3 : need_reg_all(emit);
2868 : : }
2869 : 285 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2870 : 285 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_PROTO_FUN);
2871 : :
2872 : : // make closure
2873 : : #if REG_ARG_1 != REG_RET
2874 : 285 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2875 : : #endif
2876 : 285 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
2877 : 285 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2878 [ + + ]: 285 : if (n_pos_defaults != 0 || n_kw_defaults != 0) {
2879 : 3 : adjust_stack(emit, -2);
2880 : : }
2881 : 285 : ASM_CALL_IND(emit->as, MP_F_NEW_CLOSURE);
2882 : 285 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2883 : 285 : }
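// Editorial note on the stack bookkeeping above: the 2 default slots sit
// below the n_closed_over upvalue slots.  The first stack-pointer grab (whose
// pop is immediately undone by adjust_stack) hands MAKE_FUNCTION the
// defaults; the second pops the upvalues for NEW_CLOSURE; the final
// adjust_stack(-2) then drops the default slots still being accounted for.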
2884 : :
2885 : 47336 : static void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2886 : 47336 : DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);
2887 : :
2888 : : // TODO: in viper mode, call special runtime routine with type info for args,
2889 : : // and wanted type info for return, to remove need for boxing/unboxing
2890 : :
2891 : 47336 : emit_native_pre(emit);
2892 : 47336 : vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2893 [ + + ]: 47336 : if (vtype_fun == VTYPE_BUILTIN_CAST) {
2894 : : // casting operator
2895 [ - + ]: 96 : assert(n_positional == 1 && n_keyword == 0);
2896 [ - + ]: 96 : assert(!star_flags);
2897 : 96 : DEBUG_printf(" cast to %d\n", vtype_fun);
2898 : 96 : vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
2899 [ + + + ]: 96 : switch (peek_vtype(emit, 0)) {
2900 : 78 : case VTYPE_PYOBJ: {
2901 : 78 : vtype_kind_t vtype;
2902 : 78 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2903 : 78 : emit_pre_pop_discard(emit);
2904 : 78 : emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
2905 : 78 : emit_post_push_reg(emit, vtype_cast, REG_RET);
2906 : 78 : break;
2907 : : }
2908 : 14 : case VTYPE_BOOL:
2909 : : case VTYPE_INT:
2910 : : case VTYPE_UINT:
2911 : : case VTYPE_PTR:
2912 : : case VTYPE_PTR8:
2913 : : case VTYPE_PTR16:
2914 : : case VTYPE_PTR32:
2915 : : case VTYPE_PTR_NONE:
2916 : 14 : emit_fold_stack_top(emit, REG_ARG_1);
2917 : 14 : emit_post_top_set_vtype(emit, vtype_cast);
2918 : 14 : break;
2919 : : default:
2920 : : // this can happen when casting a cast: int(int)
2921 : 4 : mp_raise_NotImplementedError(MP_ERROR_TEXT("casting"));
2922 : : }
2923 : : } else {
2924 [ - + ]: 47240 : assert(vtype_fun == VTYPE_PYOBJ);
2925 [ + + ]: 47240 : if (star_flags) {
2926 : 375 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 2); // pointer to args
2927 : 375 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2928 : 375 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2929 : : } else {
2930 [ + + ]: 46865 : if (n_positional != 0 || n_keyword != 0) {
2931 : 43016 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
2932 : : }
2933 : 46861 : emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
2934 : 46861 : emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
2935 : 46861 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2936 : : }
2937 : : }
2938 : 47328 : }
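// Editorial note: the VTYPE_BUILTIN_CAST branch above handles viper casts,
// which parse as calls but emit no runtime call for native values, e.g.:
//
//     @micropython.viper
//     def g(b) -> int:
//         p = ptr8(b)   # a cast, not a call: b is popped and re-typed
//         return p[0]   # (a Python object goes via MP_F_CONVERT_OBJ_TO_NATIVE)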
2939 : :
2940 : 17136 : static void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2941 : 17136 : DEBUG_printf("call_method(%d, %d, %d)\n", n_positional, n_keyword, star_flags);
2942 [ + + ]: 17136 : if (star_flags) {
2943 : 120 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
2944 : 120 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2945 : 120 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2946 : : } else {
2947 : 17016 : emit_native_pre(emit);
2948 : 17016 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
2949 : 17016 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2950 : 17016 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2951 : : }
2952 : 17136 : }
2953 : :
2954 : 18821 : static void emit_native_return_value(emit_t *emit) {
2955 : 18821 : DEBUG_printf("return_value\n");
2956 : :
2957 [ + + ]: 18821 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
2958 : : // Save pointer to current stack position for caller to access return value
2959 : 1317 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2960 : 1317 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2961 : :
2962 : : // Put the return kind in the return-value slot
2963 : 1317 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
2964 : 1317 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
2965 : :
2966 : : // Do the unwinding jump to get to the return handler
2967 : 1317 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2968 : 1317 : return;
2969 : : }
2970 : :
2971 [ + + ]: 17504 : if (emit->do_viper_types) {
2972 : 1028 : vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
2973 [ + + ]: 1028 : if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2974 : 712 : emit_pre_pop_discard(emit);
2975 [ + + ]: 712 : if (return_vtype == VTYPE_PYOBJ) {
2976 : 504 : emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
2977 : : } else {
2978 : 208 : ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
2979 : : }
2980 : : } else {
2981 : 316 : vtype_kind_t vtype;
2982 [ + + ]: 512 : emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
2983 [ + + ]: 316 : if (vtype != return_vtype) {
2984 : 316 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2985 : : MP_ERROR_TEXT("return expected '%q' but got '%q'"),
2986 : : vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
2987 : : }
2988 : : }
2989 [ + + ]: 1028 : if (return_vtype != VTYPE_PYOBJ) {
2990 : 404 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
2991 : : #if REG_RET != REG_PARENT_RET
2992 : : ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
2993 : : #endif
2994 : : }
2995 : : } else {
2996 : 16476 : vtype_kind_t vtype;
2997 : 16476 : emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
2998 [ - + ]: 16476 : assert(vtype == VTYPE_PYOBJ);
2999 : : }
3000 [ + + + + ]: 17504 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
3001 : : // Save return value for the global exception handler to use
3002 : 12108 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
3003 : : }
3004 : 17504 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
3005 : : }
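// Editorial note: for a viper function with a native return annotation, e.g.
//
//     @micropython.viper
//     def h() -> int:
//         return 1
//
// the MP_F_CONVERT_NATIVE_TO_OBJ call above boxes the native value so the
// caller always receives a Python object.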
3006 : :
3007 : 3586 : static void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
3008 : 3586 : DEBUG_printf("raise_varargs(%d)\n", n_args);
3009 : 3586 : (void)n_args;
3010 [ - + ]: 3586 : assert(n_args == 1);
3011 : 3586 : vtype_kind_t vtype_exc;
3012 : 3586 : emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
3013 [ + + ]: 3586 : if (vtype_exc != VTYPE_PYOBJ) {
3014 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("must raise an object"));
3015 : : }
3016 : : // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
3017 : 3586 : emit_call(emit, MP_F_NATIVE_RAISE);
3018 : 3586 : mp_asm_base_suppress_code(&emit->as->base);
3019 : 3586 : }
3020 : :
3021 : 1490 : static void emit_native_yield(emit_t *emit, int kind) {
3022 : : // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot
3023 : :
3024 : 1490 : DEBUG_printf("yield(%d)\n", kind);
3025 : :
3026 [ + + ]: 1490 : if (emit->do_viper_types) {
3027 : 8 : mp_raise_NotImplementedError(MP_ERROR_TEXT("native yield"));
3028 : : }
3029 : 1482 : emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;
3030 : :
3031 : 1482 : need_stack_settled(emit);
3032 : :
3033 [ + + ]: 1482 : if (kind == MP_EMIT_YIELD_FROM) {
3034 : :
3035 : : // Top of yield-from loop, conceptually implementing:
3036 : : // for item in generator:
3037 : : // yield item
3038 : :
3039 : : // Jump to start of loop
3040 : 924 : emit_native_jump(emit, *emit->label_slot + 2);
3041 : :
3042 : : // Label for top of loop
3043 : 924 : emit_native_label_assign(emit, *emit->label_slot + 1);
3044 : : }
3045 : :
3046 : : // Save pointer to current stack position for caller to access yielded value
3047 : 1482 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
3048 : 1482 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
3049 : :
3050 : : // Put the return kind in the return-value slot
3051 : 1482 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
3052 : 1482 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
3053 : :
3054 : : // Save re-entry PC
3055 : 1482 : ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
3056 : 1482 : emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);
3057 : :
3058 : : // Jump to exit handler
3059 : 1482 : ASM_JUMP(emit->as, emit->exit_label);
3060 : :
3061 : : // Label re-entry point
3062 : 1482 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);
3063 : :
3064 : : // Re-open any active exception handler
3065 [ + + ]: 1482 : if (emit->exc_stack_size > 0) {
3066 : : // Find innermost active exception handler, to restore as current handler
3067 : 417 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
3068 [ + + ]: 513 : for (; e >= emit->exc_stack; --e) {
3069 [ + + ]: 453 : if (e->is_active) {
3070 : : // Found active handler, get its PC
3071 : 357 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
3072 : 357 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
3073 : 357 : break;
3074 : : }
3075 : : }
3076 : : }
3077 : :
3078 : 1482 : emit_native_adjust_stack_size(emit, 1); // send_value
3079 : :
3080 [ + + ]: 1482 : if (kind == MP_EMIT_YIELD_VALUE) {
3081 : : // Check LOCAL_IDX_THROW_VAL for any injected value
3082 : 558 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_THROW_VAL(emit));
3083 : 558 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
3084 : 558 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
3085 : 558 : emit_call(emit, MP_F_NATIVE_RAISE);
3086 : : } else {
3087 : : // Label loop entry
3088 : 924 : emit_native_label_assign(emit, *emit->label_slot + 2);
3089 : :
3090 : : // Get the next item from the delegate generator
3091 : 924 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_THROW_VAL(emit)); // throw_value
3092 : 924 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
3093 : 924 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_THROW_VAL(emit), REG_ARG_2);
3094 : 924 : vtype_kind_t vtype;
3095 : 924 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
3096 : 924 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
3097 : 924 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
3098 : 924 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
3099 : 924 : emit_call(emit, MP_F_NATIVE_YIELD_FROM);
3100 : :
3101 : : // If returned non-zero then generator continues
3102 : 924 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
3103 : :
3104 : : // Pop exhausted gen, replace with ret_value
3105 : 924 : emit_native_adjust_stack_size(emit, 1); // ret_value
3106 : 924 : emit_fold_stack_top(emit, REG_ARG_1);
3107 : : }
3108 : 1482 : }
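// Editorial note: the reserved label layout used above:
//
//     *emit->label_slot + 0 : generator re-entry point (saved as the resume PC)
//     *emit->label_slot + 1 : top of the yield-from loop (generator continues)
//     *emit->label_slot + 2 : yield-from loop entry (fetch the next item)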
3109 : :
3110 : 4983 : static void emit_native_start_except_handler(emit_t *emit) {
3111 : : // Protected block has finished so leave the current exception handler
3112 : 4983 : emit_native_leave_exc_stack(emit, true);
3113 : :
3114 : : // Get and push nlr_buf.ret_val
3115 : 4983 : ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
3116 : 4983 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
3117 : 4983 : }
3118 : :
3119 : 4983 : static void emit_native_end_except_handler(emit_t *emit) {
3120 : 4983 : }
3121 : :
3122 : : const emit_method_table_t EXPORT_FUN(method_table) = {
3123 : : #if MICROPY_DYNAMIC_COMPILER
3124 : : EXPORT_FUN(new),
3125 : : EXPORT_FUN(free),
3126 : : #endif
3127 : :
3128 : : emit_native_start_pass,
3129 : : emit_native_end_pass,
3130 : : emit_native_adjust_stack_size,
3131 : : emit_native_set_source_line,
3132 : :
3133 : : {
3134 : : emit_native_load_local,
3135 : : emit_native_load_global,
3136 : : },
3137 : : {
3138 : : emit_native_store_local,
3139 : : emit_native_store_global,
3140 : : },
3141 : : {
3142 : : emit_native_delete_local,
3143 : : emit_native_delete_global,
3144 : : },
3145 : :
3146 : : emit_native_label_assign,
3147 : : emit_native_import,
3148 : : emit_native_load_const_tok,
3149 : : emit_native_load_const_small_int,
3150 : : emit_native_load_const_str,
3151 : : emit_native_load_const_obj,
3152 : : emit_native_load_null,
3153 : : emit_native_load_method,
3154 : : emit_native_load_build_class,
3155 : : emit_native_subscr,
3156 : : emit_native_attr,
3157 : : emit_native_dup_top,
3158 : : emit_native_dup_top_two,
3159 : : emit_native_pop_top,
3160 : : emit_native_rot_two,
3161 : : emit_native_rot_three,
3162 : : emit_native_jump,
3163 : : emit_native_pop_jump_if,
3164 : : emit_native_jump_if_or_pop,
3165 : : emit_native_unwind_jump,
3166 : : emit_native_setup_block,
3167 : : emit_native_with_cleanup,
3168 : : #if MICROPY_PY_ASYNC_AWAIT
3169 : : emit_native_async_with_setup_finally,
3170 : : #endif
3171 : : emit_native_end_finally,
3172 : : emit_native_get_iter,
3173 : : emit_native_for_iter,
3174 : : emit_native_for_iter_end,
3175 : : emit_native_pop_except_jump,
3176 : : emit_native_unary_op,
3177 : : emit_native_binary_op,
3178 : : emit_native_build,
3179 : : emit_native_store_map,
3180 : : emit_native_store_comp,
3181 : : emit_native_unpack_sequence,
3182 : : emit_native_unpack_ex,
3183 : : emit_native_make_function,
3184 : : emit_native_make_closure,
3185 : : emit_native_call_function,
3186 : : emit_native_call_method,
3187 : : emit_native_return_value,
3188 : : emit_native_raise_varargs,
3189 : : emit_native_yield,
3190 : :
3191 : : emit_native_start_except_handler,
3192 : : emit_native_end_except_handler,
3193 : : };
3194 : :
3195 : : #endif
|