Branch data Line data Source code
1 : : /*
2 : : * This file is part of the MicroPython project, http://micropython.org/
3 : : *
4 : : * The MIT License (MIT)
5 : : *
6 : : * Copyright (c) 2013, 2014 Damien P. George
7 : : *
8 : : * Permission is hereby granted, free of charge, to any person obtaining a copy
9 : : * of this software and associated documentation files (the "Software"), to deal
10 : : * in the Software without restriction, including without limitation the rights
11 : : * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 : : * copies of the Software, and to permit persons to whom the Software is
13 : : * furnished to do so, subject to the following conditions:
14 : : *
15 : : * The above copyright notice and this permission notice shall be included in
16 : : * all copies or substantial portions of the Software.
17 : : *
18 : : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 : : * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 : : * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 : : * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 : : * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 : : * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 : : * THE SOFTWARE.
25 : : */
26 : :
27 : : // Essentially normal Python has 1 type: Python objects
28 : : // Viper has more than 1 type, and is a more complicated superset of Python.
29 : : // If you declare everything in Viper as a Python object (ie omit type decls) then
30 : : // it should in principle be exactly the same as Python native.
31 : : // Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
32 : : // In practice we won't have a VM but rather do this in asm, which is actually very minimal.
33 : :
34 : : // Because it breaks strict Python equivalence it should be a completely separate
35 : : // decorator. It breaks equivalence because overflow on integers wraps around.
36 : : // It shouldn't break equivalence if you don't use the new types, but since the
37 : : // type decls might be used in normal Python for other reasons, it's probably safest,
38 : : // cleanest and clearest to make it a separate decorator.
39 : :
40 : : // Actually, it does break equivalence because integers default to native integers,
41 : : // not Python objects.
42 : :
43 : : // for x in l[0:8]: can be compiled into a native loop if l has pointer type
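// [Editorial illustration] "Overflow on integers wraps around" above means
// viper ints behave like fixed-width machine words rather than Python's
// arbitrary-precision ints. A minimal standalone C sketch of the same
// effect, using unsigned arithmetic so the wraparound is well defined:

#include <stdio.h>
#include <stdint.h>

int main(void) {
    uint32_t x = UINT32_MAX;
    x += 1; // wraps to 0; a Python int would instead grow to 2**32
    printf("%u\n", (unsigned)x); // prints 0
    return 0;
}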
44 : :
45 : : #include <stdio.h>
46 : : #include <string.h>
47 : : #include <assert.h>
48 : :
49 : : #include "py/emit.h"
50 : : #include "py/nativeglue.h"
51 : : #include "py/objfun.h"
52 : : #include "py/objstr.h"
53 : :
54 : : #if MICROPY_DEBUG_VERBOSE // print debugging info
55 : : #define DEBUG_PRINT (1)
56 : : #define DEBUG_printf DEBUG_printf
57 : : #else // don't print debugging info
58 : : #define DEBUG_printf(...) (void)0
59 : : #endif
60 : :
61 : : // wrapper around everything in this file
62 : : #if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN
63 : :
64 : : // C stack layout for native functions:
65 : : // 0: nlr_buf_t [optional]
66 : : // return_value [optional word]
67 : : // exc_handler_unwind [optional word]
68 : : // emit->code_state_start: mp_code_state_native_t
69 : : // emit->stack_start: Python object stack | emit->n_state
70 : : // locals (reversed, L0 at end) |
71 : : //
72 : : // C stack layout for native generator functions:
73 : : // 0=emit->stack_start: nlr_buf_t
74 : : // return_value
75 : : // exc_handler_unwind [optional word]
76 : : //
77 : : // Then REG_GENERATOR_STATE points to:
78 : : // 0=emit->code_state_start: mp_code_state_native_t
79 : : // emit->stack_start: Python object stack | emit->n_state
80 : : // locals (reversed, L0 at end) |
81 : : //
82 : : // C stack layout for viper functions:
83 : : // 0: nlr_buf_t [optional]
84 : : // return_value [optional word]
85 : : // exc_handler_unwind [optional word]
86 : : // emit->code_state_start: fun_obj, old_globals [optional]
87 : : // emit->stack_start: Python object stack | emit->n_state
88 : : // locals (reversed, L0 at end) |
89 : : // (L0-L2 may be in regs instead)
90 : :
91 : : // Native emitter needs to know the following sizes and offsets of C structs (on the target):
92 : : #if MICROPY_DYNAMIC_COMPILER
93 : : #define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK enabled
94 : : #else
95 : : #define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
96 : : #endif
97 : : #define SIZEOF_CODE_STATE (sizeof(mp_code_state_native_t) / sizeof(uintptr_t))
98 : : #define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_native_t, state) / sizeof(uintptr_t))
99 : : #define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_native_t, fun_bc) / sizeof(uintptr_t))
100 : : #define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_native_t, ip) / sizeof(uintptr_t))
101 : : #define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_native_t, sp) / sizeof(uintptr_t))
102 : : #define OFFSETOF_CODE_STATE_N_STATE (offsetof(mp_code_state_native_t, n_state) / sizeof(uintptr_t))
103 : : #define OFFSETOF_OBJ_FUN_BC_CONTEXT (offsetof(mp_obj_fun_bc_t, context) / sizeof(uintptr_t))
104 : : #define OFFSETOF_OBJ_FUN_BC_CHILD_TABLE (offsetof(mp_obj_fun_bc_t, child_table) / sizeof(uintptr_t))
105 : : #define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
106 : : #define OFFSETOF_MODULE_CONTEXT_QSTR_TABLE (offsetof(mp_module_context_t, constants.qstr_table) / sizeof(uintptr_t))
107 : : #define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
108 : : #define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))
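// [Editorial illustration] The macros above express struct sizes and field
// offsets in machine words, not bytes, because the emitter's ASM_* load and
// store helpers index memory by word. A standalone sketch of the same
// convention with a mock struct (hypothetical layout, not the real
// mp_code_state_native_t):

#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

typedef struct _mock_code_state_t {
    void *fun_bc; // stand-in for mp_code_state_native_t.fun_bc
    void *ip;     // stand-in for mp_code_state_native_t.ip
} mock_code_state_t;

// Same shape as OFFSETOF_CODE_STATE_IP: byte offset divided by word size.
#define MOCK_OFFSETOF_IP (offsetof(mock_code_state_t, ip) / sizeof(uintptr_t))

int main(void) {
    mock_code_state_t cs = { NULL, (void *)0x1000 };
    // Viewed as an array of words, the word index recovers the field,
    // which is how ASM_LOAD_REG_REG_OFFSET consumes these offsets.
    uintptr_t *words = (uintptr_t *)&cs;
    printf("ip = %p\n", (void *)words[MOCK_OFFSETOF_IP]);
    return 0;
}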
109 : :
110 : : // If not already defined, set parent arg registers to the same as the child call registers
111 : : #ifndef REG_PARENT_RET
112 : : #define REG_PARENT_RET REG_RET
113 : : #define REG_PARENT_ARG_1 REG_ARG_1
114 : : #define REG_PARENT_ARG_2 REG_ARG_2
115 : : #define REG_PARENT_ARG_3 REG_ARG_3
116 : : #define REG_PARENT_ARG_4 REG_ARG_4
117 : : #endif
118 : :
119 : : // Word index of nlr_buf_t.ret_val
120 : : #define NLR_BUF_IDX_RET_VAL (1)
121 : :
122 : : // Whether the viper function needs access to fun_obj
123 : : #define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
124 : : || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))
125 : :
126 : : // Whether the native/viper function needs to be wrapped in an exception handler
127 : : #define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
128 : : || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))
129 : :
130 : : // Whether a slot is needed to store LOCAL_IDX_EXC_HANDLER_UNWIND
131 : : #define NEED_EXC_HANDLER_UNWIND(emit) ((emit)->scope->exc_stack_size > 0)
132 : :
133 : : // Whether registers can be used to store locals (only true if there are no
134 : : // exception handlers, because otherwise an nlr_jump will restore registers to
135 : : // their state at the start of the function and updates to locals will be lost)
136 : : #define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))
137 : :
138 : : // Indices within the local C stack for various variables
139 : : #define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
140 : : #define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
141 : : #define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF + 1) // this needs a dedicated variable outside nlr_buf_t
142 : : #define LOCAL_IDX_RET_VAL(emit) (SIZEOF_NLR_BUF) // needed when NEED_GLOBAL_EXC_HANDLER is true
143 : : #define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
144 : : #define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
145 : : #define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
146 : : #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
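// [Editorial illustration] LOCAL_IDX_LOCAL_VAR encodes the "locals
// (reversed, L0 at end)" layout shown in the diagrams above. A standalone
// sketch with assumed example numbers (stack_start = 18, n_state = 5, both
// hypothetical):

#include <stdio.h>

int main(void) {
    unsigned stack_start = 18, n_state = 5;
    for (unsigned local_num = 0; local_num < 3; ++local_num) {
        // Same arithmetic as LOCAL_IDX_LOCAL_VAR(emit, local_num).
        unsigned idx = stack_start + n_state - 1 - local_num;
        printf("L%u lives at C-stack word %u\n", local_num, idx);
    }
    return 0; // prints 22, 21, 20: higher-numbered locals sit lower
}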
147 : :
148 : : #if MICROPY_PERSISTENT_CODE_SAVE
149 : :
150 : : // When building with the ability to save native code to .mpy files:
151 : : // - Qstrs are indirect via qstr_table, and REG_LOCAL_3 always points to qstr_table.
152 : : // - In a generator no registers are used to store locals, and REG_LOCAL_2 points to the generator state.
153 : : // - At most 2 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
154 : :
155 : : #define REG_GENERATOR_STATE (REG_LOCAL_2)
156 : : #define REG_QSTR_TABLE (REG_LOCAL_3)
157 : : #define MAX_REGS_FOR_LOCAL_VARS (2)
158 : :
159 : : STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2};
160 : :
161 : : #else
162 : :
163 : : // When building without the ability to save native code to .mpy files:
164 : : // - Qstr values are written directly into the machine code.
165 : : // - In a generator no registers are used to store locals, and REG_LOCAL_3 points to the generator state.
166 : : // - At most 3 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
167 : :
168 : : #define REG_GENERATOR_STATE (REG_LOCAL_3)
169 : : #define MAX_REGS_FOR_LOCAL_VARS (3)
170 : :
171 : : STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
172 : :
173 : : #endif
174 : :
175 : : #define REG_LOCAL_LAST (reg_local_table[MAX_REGS_FOR_LOCAL_VARS - 1])
176 : :
177 : : #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
178 : : *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
179 : : } while (0)
180 : :
181 : : typedef enum {
182 : : STACK_VALUE,
183 : : STACK_REG,
184 : : STACK_IMM,
185 : : } stack_info_kind_t;
186 : :
187 : : // These enum values must be distinct, and the bottom 4 bits
188 : : // must correspond to the matching MP_NATIVE_TYPE_xxx value
189 : : typedef enum {
190 : : VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
191 : : VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
192 : : VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
193 : : VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
194 : : VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
195 : : VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
196 : : VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
197 : : VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,
198 : :
199 : : VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
200 : :
201 : : VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
202 : : VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
203 : : } vtype_kind_t;
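// [Editorial illustration] Per the comment above, (vtype & 0x0f) must equal
// the corresponding MP_NATIVE_TYPE_xxx code, leaving the high bits free for
// emitter-only flavours such as VTYPE_PTR_NONE. A standalone sketch with
// stand-in constants (the MOCK_* values are assumptions, not taken from the
// real headers):

#include <assert.h>

#define MOCK_NATIVE_TYPE_OBJ (0x00)
#define MOCK_NATIVE_TYPE_PTR (0x04)

enum {
    MOCK_VTYPE_PYOBJ = 0x00 | MOCK_NATIVE_TYPE_OBJ,
    MOCK_VTYPE_PTR = 0x00 | MOCK_NATIVE_TYPE_PTR,
    MOCK_VTYPE_PTR_NONE = 0x50 | MOCK_NATIVE_TYPE_PTR, // extra flavour
};

int main(void) {
    // Masking the bottom 4 bits recovers the native type either way.
    assert((MOCK_VTYPE_PTR & 0x0f) == MOCK_NATIVE_TYPE_PTR);
    assert((MOCK_VTYPE_PTR_NONE & 0x0f) == MOCK_NATIVE_TYPE_PTR);
    return 0;
}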
204 : :
205 : 68 : STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
206 [ - + - + - - + + + ]: 68 : switch (vtype) {
207 : : case VTYPE_PYOBJ:
208 : : return MP_QSTR_object;
209 : 0 : case VTYPE_BOOL:
210 : 0 : return MP_QSTR_bool;
211 : 32 : case VTYPE_INT:
212 : 32 : return MP_QSTR_int;
213 : 0 : case VTYPE_UINT:
214 : 0 : return MP_QSTR_uint;
215 : 4 : case VTYPE_PTR:
216 : 4 : return MP_QSTR_ptr;
217 : 0 : case VTYPE_PTR8:
218 : 0 : return MP_QSTR_ptr8;
219 : 0 : case VTYPE_PTR16:
220 : 0 : return MP_QSTR_ptr16;
221 : 4 : case VTYPE_PTR32:
222 : 4 : return MP_QSTR_ptr32;
223 : 8 : case VTYPE_PTR_NONE:
224 : : default:
225 : 8 : return MP_QSTR_None;
226 : : }
227 : : }
228 : :
229 : : typedef struct _stack_info_t {
230 : : vtype_kind_t vtype;
231 : : stack_info_kind_t kind;
232 : : union {
233 : : int u_reg;
234 : : mp_int_t u_imm;
235 : : } data;
236 : : } stack_info_t;
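// [Editorial illustration] stack_info_t lets the emitter track Python stack
// entries lazily: pushing a constant records STACK_IMM and pushing a value
// already in a register records STACK_REG, and no store is emitted until
// the entry must be settled to the concrete stack (see need_stack_settled
// below). A simplified standalone model of that idea:

#include <stdio.h>

typedef enum { MOCK_STACK_VALUE, MOCK_STACK_REG, MOCK_STACK_IMM } mock_kind_t;

typedef struct {
    mock_kind_t kind;
    long imm;
    int reg;
} mock_entry_t;

// Emit the deferred store only when the value is finally needed in memory.
static void mock_settle(mock_entry_t *e, int slot) {
    if (e->kind == MOCK_STACK_IMM) {
        printf("emit: mov slot%d, #%ld\n", slot, e->imm);
    } else if (e->kind == MOCK_STACK_REG) {
        printf("emit: mov slot%d, r%d\n", slot, e->reg);
    }
    e->kind = MOCK_STACK_VALUE; // now lives in the concrete stack
}

int main(void) {
    mock_entry_t tos = { MOCK_STACK_IMM, 42, 0 };
    // The push itself emitted no code; the store happens only here:
    mock_settle(&tos, 3);
    return 0;
}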
237 : :
238 : : #define UNWIND_LABEL_UNUSED (0x7fff)
239 : : #define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)
240 : :
241 : : typedef struct _exc_stack_entry_t {
242 : : uint16_t label : 15;
243 : : uint16_t is_finally : 1;
244 : : uint16_t unwind_label : 15;
245 : : uint16_t is_active : 1;
246 : : } exc_stack_entry_t;
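// [Editorial illustration] label and unwind_label are 15-bit fields, so the
// largest storable value is 0x7fff; UNWIND_LABEL_UNUSED and
// UNWIND_LABEL_DO_FINAL_UNWIND above are therefore the top two 15-bit
// values, reserved as sentinels that real labels are assumed never to
// reach. A standalone check:

#include <assert.h>

struct mock_entry {
    unsigned label : 15; // same width as exc_stack_entry_t.label
};

int main(void) {
    struct mock_entry e = { 0x7fff }; // maximum 15-bit value
    assert(e.label == 0x7fff);        // fits exactly; 0x8000 would not
    return 0;
}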
247 : :
248 : : struct _emit_t {
249 : : mp_emit_common_t *emit_common;
250 : : mp_obj_t *error_slot;
251 : : uint *label_slot;
252 : : uint exit_label;
253 : : int pass;
254 : :
255 : : bool do_viper_types;
256 : : bool prelude_offset_uses_u16_encoding;
257 : :
258 : : mp_uint_t local_vtype_alloc;
259 : : vtype_kind_t *local_vtype;
260 : :
261 : : mp_uint_t stack_info_alloc;
262 : : stack_info_t *stack_info;
263 : : vtype_kind_t saved_stack_vtype;
264 : :
265 : : size_t exc_stack_alloc;
266 : : size_t exc_stack_size;
267 : : exc_stack_entry_t *exc_stack;
268 : :
269 : : int prelude_offset;
270 : : int prelude_ptr_index;
271 : : int start_offset;
272 : : int n_state;
273 : : uint16_t code_state_start;
274 : : uint16_t stack_start;
275 : : int stack_size;
276 : : uint16_t n_info;
277 : : uint16_t n_cell;
278 : :
279 : : scope_t *scope;
280 : :
281 : : ASM_T *as;
282 : : };
283 : :
284 : : STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj);
285 : : STATIC void emit_native_global_exc_entry(emit_t *emit);
286 : : STATIC void emit_native_global_exc_exit(emit_t *emit);
287 : : STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
288 : :
289 : 1403 : emit_t *EXPORT_FUN(new)(mp_emit_common_t * emit_common, mp_obj_t *error_slot, uint *label_slot, mp_uint_t max_num_labels) {
290 : 1403 : emit_t *emit = m_new0(emit_t, 1);
291 : 1403 : emit->emit_common = emit_common;
292 : 1403 : emit->error_slot = error_slot;
293 : 1403 : emit->label_slot = label_slot;
294 : 1403 : emit->stack_info_alloc = 8;
295 : 1403 : emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
296 : 1403 : emit->exc_stack_alloc = 8;
297 : 1403 : emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
298 : 1403 : emit->as = m_new0(ASM_T, 1);
299 : 1403 : mp_asm_base_init(&emit->as->base, max_num_labels);
300 : 1403 : return emit;
301 : : }
302 : :
303 : 1387 : void EXPORT_FUN(free)(emit_t * emit) {
304 : 1387 : mp_asm_base_deinit(&emit->as->base, false);
305 : 1387 : m_del_obj(ASM_T, emit->as);
306 : 1387 : m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
307 : 1387 : m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
308 : 1387 : m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
309 : 1387 : m_del_obj(emit_t, emit);
310 : 1387 : }
311 : :
312 : : STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);
313 : :
314 : 21619 : STATIC void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
315 : 21619 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
316 : 21619 : }
317 : :
318 : 188684 : STATIC void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
319 [ + + ]: 188684 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
320 : 11313 : ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
321 : : } else {
322 : 177371 : ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
323 : : }
324 : 188684 : }
325 : :
326 : 121735 : STATIC void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
327 [ + + ]: 121735 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
328 : 6614 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
329 : : } else {
330 : 115121 : ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
331 : : }
332 : 121735 : }
333 : :
334 : 76660 : STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
335 [ + + ]: 76660 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
336 : 5541 : ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
337 : 5541 : ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
338 : : } else {
339 : 71119 : ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
340 : : }
341 : 76660 : }
342 : :
343 : 122468 : STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
344 : : #if MICROPY_PERSISTENT_CODE_SAVE
345 : : ASM_LOAD16_REG_REG_OFFSET(emit->as, arg_reg, REG_QSTR_TABLE, mp_emit_common_use_qstr(emit->emit_common, qst));
346 : : #else
347 : 122468 : ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
348 : : #endif
349 : 122468 : }
350 : :
351 : 22251 : STATIC void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
352 : : #if MICROPY_PERSISTENT_CODE_SAVE
353 : : emit_load_reg_with_object(emit, reg_dest, MP_OBJ_NEW_QSTR(qst));
354 : : #else
355 : 22251 : ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
356 : : #endif
357 : 22251 : }
358 : :
359 : : #define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
360 : : do { \
361 : : ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
362 : : emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
363 : : } while (false)
364 : :
365 : 12021 : STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
366 : 12021 : DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
367 : :
368 : 12021 : emit->pass = pass;
369 : 12021 : emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
370 : 12021 : emit->stack_size = 0;
371 : 12021 : emit->scope = scope;
372 : :
373 : : // allocate memory for keeping track of the types of locals
374 [ + + ]: 12021 : if (emit->local_vtype_alloc < scope->num_locals) {
375 : 705 : emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
376 : 705 : emit->local_vtype_alloc = scope->num_locals;
377 : : }
378 : :
379 : : // set default type for arguments
380 : 12021 : mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
381 [ + + ]: 12021 : if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
382 : 135 : num_args += 1;
383 : : }
384 [ + + ]: 12021 : if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
385 : 60 : num_args += 1;
386 : : }
387 [ + + ]: 20788 : for (mp_uint_t i = 0; i < num_args; i++) {
388 : 8767 : emit->local_vtype[i] = VTYPE_PYOBJ;
389 : : }
390 : :
391 : : // Set viper type for arguments
392 [ + + ]: 12021 : if (emit->do_viper_types) {
393 [ + + ]: 2428 : for (int i = 0; i < emit->scope->id_info_len; ++i) {
394 : 1720 : id_info_t *id = &emit->scope->id_info[i];
395 [ + + ]: 1720 : if (id->flags & ID_FLAG_IS_PARAM) {
396 [ - + ]: 982 : assert(id->local_num < emit->local_vtype_alloc);
397 : 982 : emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
398 : : }
399 : : }
400 : : }
401 : :
402 : : // local variables begin unbound, and have unknown type
403 [ + + ]: 32362 : for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
404 [ + + ]: 40114 : emit->local_vtype[i] = emit->do_viper_types ? VTYPE_UNBOUND : VTYPE_PYOBJ;
405 : : }
406 : :
407 : : // values on stack begin unbound
408 [ + + ]: 118685 : for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
409 : 106664 : emit->stack_info[i].kind = STACK_VALUE;
410 : 106664 : emit->stack_info[i].vtype = VTYPE_UNBOUND;
411 : : }
412 : :
413 [ + + ]: 20067 : mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
414 : :
415 : : // generate code for entry to function
416 : :
417 : : // Work out start of code state (mp_code_state_native_t or reduced version for viper)
418 : 12021 : emit->code_state_start = 0;
419 [ + + + + ]: 12021 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
420 : 8340 : emit->code_state_start = SIZEOF_NLR_BUF; // for nlr_buf_t
421 : 8340 : emit->code_state_start += 1; // for return_value
422 [ + + ]: 8340 : if (NEED_EXC_HANDLER_UNWIND(emit)) {
423 : 2208 : emit->code_state_start += 1;
424 : : }
425 : : }
426 : :
427 : 12021 : size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
428 : :
429 [ + + ]: 12021 : if (emit->do_viper_types) {
430 : : // Work out size of state (locals plus stack)
431 : : // n_state counts all stack and locals, even those in registers
432 : 708 : emit->n_state = scope->num_locals + scope->stack_size;
433 : 708 : int num_locals_in_regs = 0;
434 [ + + + + ]: 708 : if (CAN_USE_REGS_FOR_LOCALS(emit)) {
435 : 670 : num_locals_in_regs = scope->num_locals;
436 [ + + ]: 670 : if (num_locals_in_regs > MAX_REGS_FOR_LOCAL_VARS) {
437 : 120 : num_locals_in_regs = MAX_REGS_FOR_LOCAL_VARS;
438 : : }
439 : : // Need a spot for REG_LOCAL_LAST (see below)
440 [ + + ]: 670 : if (scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1) {
441 : 24 : --num_locals_in_regs;
442 : : }
443 : : }
444 : :
445 : : // Work out where the locals and Python stack start within the C stack
446 [ + + + + ]: 708 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
447 : : // Reserve 2 words for function object and old globals
448 : 288 : emit->stack_start = emit->code_state_start + 2;
449 [ + + ]: 420 : } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
450 : : // Reserve 1 word for function object, to access const table
451 : 12 : emit->stack_start = emit->code_state_start + 1;
452 : : } else {
453 : 408 : emit->stack_start = emit->code_state_start + 0;
454 : : }
455 : :
456 : : // Entry to function
457 : 708 : ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);
458 : :
459 : : #if N_X86
460 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
461 : : #endif
462 : :
463 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
464 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
465 : : #if MICROPY_PERSISTENT_CODE_SAVE
466 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
467 : : #endif
468 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
469 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
470 : :
471 : : // Store function object (passed as first arg) to stack if needed
472 [ + + + + ]: 708 : if (NEED_FUN_OBJ(emit)) {
473 : 296 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
474 : : }
475 : :
476 : : // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_LAST
477 : : #if N_X86
478 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
479 : : asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
480 : : asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_LAST);
481 : : #else
482 : 708 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
483 : 708 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
484 : 708 : ASM_MOV_REG_REG(emit->as, REG_LOCAL_LAST, REG_PARENT_ARG_4);
485 : : #endif
486 : :
487 : : // Check number of args matches this function, and call mp_arg_check_num_sig if not
488 : 708 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
489 : 708 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
490 : 708 : ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
491 : 708 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
492 : 708 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
493 : 708 : ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
494 : 708 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);
495 : :
496 : : // Store arguments into locals (reg or stack), converting to native if needed
497 [ + + ]: 1678 : for (int i = 0; i < emit->scope->num_pos_args; i++) {
498 : 970 : int r = REG_ARG_1;
499 : 970 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_LAST, i);
500 [ + + ]: 970 : if (emit->local_vtype[i] != VTYPE_PYOBJ) {
501 : 862 : emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
502 : 862 : r = REG_RET;
503 : : }
504 : : // REG_LOCAL_LAST points to the args array so be sure not to overwrite it if it's still needed
505 [ + + + - + - + + + + ]: 970 : if (i < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit) && (i != MAX_REGS_FOR_LOCAL_VARS - 1 || emit->scope->num_pos_args == MAX_REGS_FOR_LOCAL_VARS)) {
507 : : } else {
508 : 66 : emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
509 : : }
510 : : }
511 : : // Get local from the stack back into REG_LOCAL_LAST if this reg couldn't be written to above
512 [ + + + - + - ]: 708 : if (emit->scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1 && CAN_USE_REGS_FOR_LOCALS(emit)) {
513 : 24 : ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_LAST, LOCAL_IDX_LOCAL_VAR(emit, MAX_REGS_FOR_LOCAL_VARS - 1));
514 : : }
515 : :
516 : 708 : emit_native_global_exc_entry(emit);
517 : :
518 : : } else {
519 : : // work out size of state (locals plus stack)
520 : 11313 : emit->n_state = scope->num_locals + scope->stack_size;
521 : :
522 [ + + ]: 11313 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
523 : 708 : mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_ptr_index);
524 : 708 : mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
525 : 708 : ASM_ENTRY(emit->as, emit->code_state_start);
526 : :
527 : : // Reset the state size for the state pointed to by REG_GENERATOR_STATE
528 : 708 : emit->code_state_start = 0;
529 : 708 : emit->stack_start = SIZEOF_CODE_STATE;
530 : :
531 : : // Put address of code_state into REG_GENERATOR_STATE
532 : : #if N_X86
533 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
534 : : #else
535 : 708 : ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
536 : : #endif
537 : :
538 : : // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from
539 : : #if N_X86
540 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
541 : : #endif
542 : 708 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_PARENT_ARG_2);
543 : :
544 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
545 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
546 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
547 : : #if MICROPY_PERSISTENT_CODE_SAVE
548 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
549 : : #endif
550 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
551 : 708 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
552 : : } else {
553 : : // The locals and stack start after the code_state structure
554 : 10605 : emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
555 : :
556 : : // Allocate space on C-stack for code_state structure, which includes state
557 : 10605 : ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
558 : :
559 : : // Prepare incoming arguments for call to mp_setup_code_state
560 : :
561 : : #if N_X86
562 : : asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
563 : : asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
564 : : asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
565 : : asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
566 : : #endif
567 : :
568 : : // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
569 : 10605 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
570 : : #if MICROPY_PERSISTENT_CODE_SAVE
571 : : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
572 : : #endif
573 : 10605 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
574 : 10605 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
575 : :
576 : : // Set code_state.fun_bc
577 : 10605 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
578 : :
579 : : // Set code_state.ip, a pointer to the beginning of the prelude. This pointer is found
580 : : // either directly in mp_obj_fun_bc_t.child_table (if there are no children), or in
581 : : // mp_obj_fun_bc_t.child_table[num_children] (if num_children > 0).
582 : 10605 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
583 [ + + ]: 10605 : if (emit->prelude_ptr_index != 0) {
584 : 2593 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, emit->prelude_ptr_index);
585 : : }
586 : 10605 : emit_native_mov_state_reg(emit, emit->code_state_start + OFFSETOF_CODE_STATE_IP, REG_PARENT_ARG_1);
587 : :
588 : : // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
589 : 10605 : emit_native_mov_state_imm_via(emit, emit->code_state_start + OFFSETOF_CODE_STATE_N_STATE, emit->n_state, REG_ARG_1);
590 : :
591 : : // Put address of code_state into first arg
592 : 10605 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
593 : :
594 : : // Copy next 3 args if needed
595 : : #if REG_ARG_2 != REG_PARENT_ARG_2
596 : : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
597 : : #endif
598 : : #if REG_ARG_3 != REG_PARENT_ARG_3
599 : : ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
600 : : #endif
601 : : #if REG_ARG_4 != REG_PARENT_ARG_4
602 : : ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
603 : : #endif
604 : :
605 : : // Call mp_setup_code_state to prepare code_state structure
606 : : #if N_THUMB
607 : : asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
608 : : #elif N_ARM
609 : : asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
610 : : #else
611 : 10605 : ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
612 : : #endif
613 : : }
614 : :
615 : 11313 : emit_native_global_exc_entry(emit);
616 : :
617 : : // cache some locals in registers, but only if no exception handlers
618 [ + + + + ]: 11313 : if (CAN_USE_REGS_FOR_LOCALS(emit)) {
619 [ + + + + ]: 16257 : for (int i = 0; i < MAX_REGS_FOR_LOCAL_VARS && i < scope->num_locals; ++i) {
620 : 7671 : ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
621 : : }
622 : : }
623 : :
624 : : // set the type of closed over variables
625 [ + + ]: 61029 : for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
626 : 49716 : id_info_t *id = &scope->id_info[i];
627 [ + + ]: 49716 : if (id->kind == ID_INFO_KIND_CELL) {
628 : 231 : emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
629 : : }
630 : : }
631 : : }
632 : 12021 : }
633 : :
634 : 87516 : static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
635 : 87516 : mp_asm_base_data(&emit->as->base, 1, val);
636 : : }
637 : :
638 : 18915 : static inline void emit_native_write_code_info_qstr(emit_t *emit, qstr qst) {
639 : 18915 : mp_encode_uint(&emit->as->base, mp_asm_base_get_cur_to_write_bytes, mp_emit_common_use_qstr(emit->emit_common, qst));
640 : 18915 : }
641 : :
642 : 12005 : STATIC bool emit_native_end_pass(emit_t *emit) {
643 : 12005 : emit_native_global_exc_exit(emit);
644 : :
645 [ + + ]: 12005 : if (!emit->do_viper_types) {
646 : 11313 : emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
647 : 11313 : emit->prelude_ptr_index = emit->emit_common->ct_cur_child;
648 : :
649 : 11313 : size_t n_state = emit->n_state;
650 : 11313 : size_t n_exc_stack = 0; // exc-stack not needed for native code
651 [ + + ]: 76164 : MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);
652 : :
653 : 11313 : size_t n_info = emit->n_info;
654 : 11313 : size_t n_cell = emit->n_cell;
655 [ + + + + ]: 11352 : MP_BC_PRELUDE_SIZE_ENCODE(n_info, n_cell, emit_native_write_code_info_byte, emit);
656 : :
657 : : // bytecode prelude: source info (function and argument qstrs)
658 : 11313 : size_t info_start = mp_asm_base_get_code_pos(&emit->as->base);
659 : 11313 : emit_native_write_code_info_qstr(emit, emit->scope->simple_name);
660 [ + + ]: 18915 : for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
661 : 13395 : qstr qst = MP_QSTR__star_;
662 [ + + ]: 13395 : for (int j = 0; j < emit->scope->id_info_len; ++j) {
663 : 12870 : id_info_t *id = &emit->scope->id_info[j];
664 [ + + + + ]: 12870 : if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
665 : 7077 : qst = id->qst;
666 : 7077 : break;
667 : : }
668 : : }
669 : 7602 : emit_native_write_code_info_qstr(emit, qst);
670 : : }
671 : 11313 : emit->n_info = mp_asm_base_get_code_pos(&emit->as->base) - info_start;
672 : :
673 : : // bytecode prelude: initialise closed over variables
674 : 11313 : size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
675 [ + + ]: 61029 : for (int i = 0; i < emit->scope->id_info_len; i++) {
676 : 49716 : id_info_t *id = &emit->scope->id_info[i];
677 [ + + ]: 49716 : if (id->kind == ID_INFO_KIND_CELL) {
678 [ - + ]: 231 : assert(id->local_num <= 255);
679 : 231 : mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
680 : : }
681 : : }
682 : 11313 : emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;
683 : :
684 : : }
685 : :
686 : 12005 : ASM_END_PASS(emit->as);
687 : :
688 : : // check stack is back to zero size
689 [ - + ]: 12005 : assert(emit->stack_size == 0);
690 [ - + ]: 12005 : assert(emit->exc_stack_size == 0);
691 : :
692 [ + + ]: 12005 : if (emit->pass == MP_PASS_EMIT) {
693 [ + + ]: 3975 : void *f = mp_asm_base_get_code(&emit->as->base);
694 [ + + ]: 3975 : mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
695 : :
696 : 3975 : mp_raw_code_t **children = emit->emit_common->children;
697 [ + + ]: 3975 : if (!emit->do_viper_types) {
698 : : #if MICROPY_EMIT_NATIVE_PRELUDE_SEPARATE_FROM_MACHINE_CODE
699 : : // Executable code cannot be accessed byte-wise on this architecture, so copy
700 : : // the prelude to a separate memory region that is byte-wise readable.
701 : : void *buf = emit->as->base.code_base + emit->prelude_offset;
702 : : size_t n = emit->as->base.code_offset - emit->prelude_offset;
703 : : const uint8_t *prelude_ptr = memcpy(m_new(uint8_t, n), buf, n);
704 : : #else
705 : : // Point to the prelude directly, at the end of the machine code data.
706 : 3771 : const uint8_t *prelude_ptr = (const uint8_t *)f + emit->prelude_offset;
707 : : #endif
708 : :
709 : : // Store the pointer to the prelude using the child_table.
710 [ - + ]: 3771 : assert(emit->prelude_ptr_index == emit->emit_common->ct_cur_child);
711 [ + + ]: 3771 : if (emit->prelude_ptr_index == 0) {
712 : : children = (void *)prelude_ptr;
713 : : } else {
714 : 920 : children = m_renew(mp_raw_code_t *, children, emit->prelude_ptr_index, emit->prelude_ptr_index + 1);
715 : 920 : children[emit->prelude_ptr_index] = (void *)prelude_ptr;
716 : : }
717 : : }
718 : :
719 : 3975 : mp_emit_glue_assign_native(emit->scope->raw_code,
720 : 3975 : emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
721 : : f, f_len,
722 : : children,
723 : : #if MICROPY_PERSISTENT_CODE_SAVE
724 : : emit->emit_common->ct_cur_child,
725 : : emit->prelude_offset,
726 : : #endif
727 [ + + ]: 3975 : emit->scope->scope_flags, 0, 0);
728 : : }
729 : :
730 : 12005 : return true;
731 : : }
732 : :
733 : 335648 : STATIC void ensure_extra_stack(emit_t *emit, size_t delta) {
734 [ + + ]: 335648 : if (emit->stack_size + delta > emit->stack_info_alloc) {
735 : 338 : size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
736 : 338 : emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
737 : 338 : emit->stack_info_alloc = new_alloc;
738 : : }
739 : 335648 : }
740 : :
741 : 633845 : STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
742 [ - + ]: 633845 : assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
743 [ - + ]: 633845 : assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
744 : 633845 : emit->stack_size += stack_size_delta;
745 [ + - + + ]: 633845 : if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
746 : 14738 : emit->scope->stack_size = emit->stack_size;
747 : : }
748 : : #ifdef DEBUG_PRINT
749 : : DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
750 : : for (int i = 0; i < emit->stack_size; i++) {
751 : : stack_info_t *si = &emit->stack_info[i];
752 : : DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
753 : : }
754 : : DEBUG_printf("\n");
755 : : #endif
756 : 633845 : }
757 : :
758 : 6987 : STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
759 : 6987 : DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
760 [ + + ]: 6987 : if (delta > 0) {
761 : 6528 : ensure_extra_stack(emit, delta);
762 : : }
763 : : // If we are adjusting the stack in a positive direction (pushing) then we
764 : : // need to fill in values for the stack kind and vtype of the newly-pushed
765 : : // entries. These should be set to "value" (ie not reg or imm) because we
766 : : // should only need to adjust the stack due to a jump to this part in the
767 : : // code (and hence we have settled the stack before the jump).
768 [ + + ]: 14391 : for (mp_int_t i = 0; i < delta; i++) {
769 : 7404 : stack_info_t *si = &emit->stack_info[emit->stack_size + i];
770 : 7404 : si->kind = STACK_VALUE;
771 : : // TODO we don't know the vtype to use here. At the moment this is a
772 : : // hack to get the case of multi comparison working.
773 [ + + ]: 7404 : if (delta == 1) {
774 : 5991 : si->vtype = emit->saved_stack_vtype;
775 : : } else {
776 : 1413 : si->vtype = VTYPE_PYOBJ;
777 : : }
778 : : }
779 : 6987 : adjust_stack(emit, delta);
780 : 6987 : }
781 : :
782 : 225928 : STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
783 : 225928 : (void)emit;
784 : 225928 : (void)source_line;
785 : 225928 : }
786 : :
787 : : // this must be called at start of emit functions
788 : : STATIC void emit_native_pre(emit_t *emit) {
789 : : (void)emit;
790 : : }
791 : :
792 : : // depth==0 is top, depth==1 is before top, etc
793 : 11685 : STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
794 : 11685 : return &emit->stack_info[emit->stack_size - 1 - depth];
795 : : }
796 : :
797 : : // depth==0 is top, depth==1 is before top, etc
798 : 98798 : STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
799 [ + + ]: 98798 : if (emit->do_viper_types) {
800 : 4022 : return peek_stack(emit, depth)->vtype;
801 : : } else {
802 : : // Type is always PYOBJ even if the intermediate stored value is not
803 : : return VTYPE_PYOBJ;
804 : : }
805 : : }
806 : :
807 : : // pos=1 is TOS, pos=2 is next, etc
808 : : // use pos=0 for no skipping
809 : 233726 : STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
810 : 233726 : skip_stack_pos = emit->stack_size - skip_stack_pos;
811 [ + + ]: 744924 : for (int i = 0; i < emit->stack_size; i++) {
812 [ + + ]: 511198 : if (i != skip_stack_pos) {
813 : 320861 : stack_info_t *si = &emit->stack_info[i];
814 [ + + + + ]: 320861 : if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
815 : 29988 : si->kind = STACK_VALUE;
816 : 29988 : emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
817 : : }
818 : : }
819 : : }
820 : 233726 : }
821 : :
822 : : // Ensures all unsettled registers that hold Python values are copied to the
823 : : // concrete Python stack. All registers are then free to use.
824 : 476720 : STATIC void need_reg_all(emit_t *emit) {
825 [ + + ]: 1081195 : for (int i = 0; i < emit->stack_size; i++) {
826 : 604475 : stack_info_t *si = &emit->stack_info[i];
827 [ + + ]: 604475 : if (si->kind == STACK_REG) {
828 : 97323 : DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
829 : 97323 : si->kind = STACK_VALUE;
830 : 97323 : emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
831 : : }
832 : : }
833 : 476720 : }
834 : :
835 : 54312 : STATIC vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
836 [ + + + + ]: 54312 : if (!convert_to_pyobj && emit->do_viper_types) {
837 : 632 : ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
838 : 632 : return si->vtype;
839 : : } else {
840 [ + + ]: 53680 : if (si->vtype == VTYPE_PYOBJ) {
841 : 762 : ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
842 [ + + ]: 52918 : } else if (si->vtype == VTYPE_BOOL) {
843 : 1635 : emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
844 [ + + ]: 51283 : } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
845 : 32517 : ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
846 [ + + ]: 18766 : } else if (si->vtype == VTYPE_PTR_NONE) {
847 : 18762 : emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
848 : : } else {
849 : 4 : mp_raise_NotImplementedError(MP_ERROR_TEXT("conversion to object"));
850 : : }
851 : 53676 : return VTYPE_PYOBJ;
852 : : }
853 : : }
854 : :
855 : : // Copies all unsettled registers and immediates that are Python values into the
856 : : // concrete Python stack. This ensures the concrete Python stack holds valid
857 : : // values for the current stack_size.
858 : : // This function may clobber REG_TEMP1.
859 : 109808 : STATIC void need_stack_settled(emit_t *emit) {
860 : 109808 : DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
861 : 109808 : need_reg_all(emit);
862 [ + + ]: 177854 : for (int i = 0; i < emit->stack_size; i++) {
863 : 68046 : stack_info_t *si = &emit->stack_info[i];
864 [ + + ]: 68046 : if (si->kind == STACK_IMM) {
865 : 1854 : DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
866 : 1854 : si->kind = STACK_VALUE;
867 : : // using REG_TEMP1 to avoid clobbering REG_TEMP0 (aka REG_RET)
868 : 1854 : si->vtype = load_reg_stack_imm(emit, REG_TEMP1, si, false);
869 : 1854 : emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP1);
870 : : }
871 : : }
872 : 109808 : }
873 : :
874 : : // pos=1 is TOS, pos=2 is next, etc
875 : 185014 : STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
876 : 185014 : need_reg_single(emit, reg_dest, pos);
877 : 185014 : stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
878 : 185014 : *vtype = si->vtype;
879 [ + + + - ]: 185014 : switch (si->kind) {
880 : 61601 : case STACK_VALUE:
881 : 61601 : emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
882 : 61601 : break;
883 : :
884 : 93960 : case STACK_REG:
885 [ + + ]: 93960 : if (si->data.u_reg != reg_dest) {
886 : 88191 : ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
887 : : }
888 : : break;
889 : :
890 : 29453 : case STACK_IMM:
891 : 29453 : *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
892 : 29453 : break;
893 : : }
894 : 185014 : }
895 : :
896 : : // does an efficient X=pop(); discard(); push(X)
897 : : // needs a (non-temp) register in case the popped element was stored in the stack
898 : 530 : STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
899 : 530 : stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
900 : 530 : si[0] = si[1];
901 [ + + ]: 530 : if (si->kind == STACK_VALUE) {
902 : : // if folded element was on the stack we need to put it in a register
903 : 516 : emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
904 : 516 : si->kind = STACK_REG;
905 : 516 : si->data.u_reg = reg_dest;
906 : : }
907 : 530 : adjust_stack(emit, -1);
908 : 530 : }
909 : :
910 : : // If the stacked value is in a register and the register is not r1 or r2, then
911 : : // *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
912 : 7293 : STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
913 : 7293 : stack_info_t *si = peek_stack(emit, 0);
914 [ + + + + + - ]: 7293 : if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
915 : 5323 : *vtype = si->vtype;
916 : 5323 : *reg_dest = si->data.u_reg;
917 : 5323 : need_reg_single(emit, *reg_dest, 1);
918 : : } else {
919 : 1970 : emit_access_stack(emit, 1, vtype, *reg_dest);
920 : : }
921 : 7293 : adjust_stack(emit, -1);
922 : 7293 : }
923 : :
924 : 39731 : STATIC void emit_pre_pop_discard(emit_t *emit) {
925 : 39731 : adjust_stack(emit, -1);
926 : 39731 : }
927 : :
928 : 181313 : STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
929 : 181313 : emit_access_stack(emit, 1, vtype, reg_dest);
930 : 181313 : adjust_stack(emit, -1);
931 : 181313 : }
932 : :
933 : 22190 : STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
934 : 22190 : emit_pre_pop_reg(emit, vtypea, rega);
935 : 22190 : emit_pre_pop_reg(emit, vtypeb, regb);
936 : 22190 : }
937 : :
938 : 3360 : STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
939 : 3360 : emit_pre_pop_reg(emit, vtypea, rega);
940 : 3360 : emit_pre_pop_reg(emit, vtypeb, regb);
941 : 3360 : emit_pre_pop_reg(emit, vtypec, regc);
942 : 3360 : }
943 : :
944 : : STATIC void emit_post(emit_t *emit) {
945 : : (void)emit;
946 : : }
947 : :
948 : 14 : STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
949 : 14 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
950 : 14 : si->vtype = new_vtype;
951 : 14 : }
952 : :
953 : 258388 : STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
954 : 258388 : ensure_extra_stack(emit, 1);
955 : 258388 : stack_info_t *si = &emit->stack_info[emit->stack_size];
956 : 258388 : si->vtype = vtype;
957 : 258388 : si->kind = STACK_REG;
958 : 258388 : si->data.u_reg = reg;
959 : 258388 : adjust_stack(emit, 1);
960 : 258388 : }
961 : :
962 : 55240 : STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
963 : 55240 : ensure_extra_stack(emit, 1);
964 : 55240 : stack_info_t *si = &emit->stack_info[emit->stack_size];
965 : 55240 : si->vtype = vtype;
966 : 55240 : si->kind = STACK_IMM;
967 : 55240 : si->data.u_imm = imm;
968 : 55240 : adjust_stack(emit, 1);
969 : 55240 : }
970 : :
971 : 6114 : STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
972 : 6114 : emit_post_push_reg(emit, vtypea, rega);
973 : 6114 : emit_post_push_reg(emit, vtypeb, regb);
974 : 6114 : }
975 : :
976 : 126 : STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
977 : 126 : emit_post_push_reg(emit, vtypea, rega);
978 : 126 : emit_post_push_reg(emit, vtypeb, regb);
979 : 126 : emit_post_push_reg(emit, vtypec, regc);
980 : 126 : }
981 : :
982 : 417 : STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
983 : 417 : emit_post_push_reg(emit, vtypea, rega);
984 : 417 : emit_post_push_reg(emit, vtypeb, regb);
985 : 417 : emit_post_push_reg(emit, vtypec, regc);
986 : 417 : emit_post_push_reg(emit, vtyped, regd);
987 : 417 : }
988 : :
989 : 74877 : STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
990 : 74877 : need_reg_all(emit);
991 : 74877 : ASM_CALL_IND(emit->as, fun_kind);
992 : 74877 : }
993 : :
994 : 70772 : STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
995 : 70772 : need_reg_all(emit);
996 : 70772 : ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
997 : 70772 : ASM_CALL_IND(emit->as, fun_kind);
998 : 70772 : }
999 : :
1000 : 14370 : STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
1001 : 14370 : need_reg_all(emit);
1002 : 14370 : ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
1003 : 14370 : ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
1004 : 14370 : ASM_CALL_IND(emit->as, fun_kind);
1005 : 14370 : }
1006 : :
1007 : 122468 : STATIC void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
1008 : 122468 : need_reg_all(emit);
1009 : 122468 : emit_native_mov_reg_qstr(emit, arg_reg, qst);
1010 : 122468 : ASM_CALL_IND(emit->as, fun_kind);
1011 : 122468 : }
1012 : :
1013 : : // vtype of all n_pop objects is VTYPE_PYOBJ
1014 : : // Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
1015 : : // If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
1016 : : // Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with the stack pointer).
1017 : 61172 : STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
1018 : 61172 : need_reg_all(emit);
1019 : :
1020 : : // First, store any immediate values to their respective place on the stack.
1021 [ + + ]: 181134 : for (mp_uint_t i = 0; i < n_pop; i++) {
1022 : 119966 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1023 : : // must push any imm's to stack
1024 : : // must convert them to VTYPE_PYOBJ for viper code
1025 [ + + ]: 119966 : if (si->kind == STACK_IMM) {
1026 : 23005 : si->kind = STACK_VALUE;
1027 : 23005 : si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
1028 : 23001 : emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
1029 : : }
1030 : :
1031 : : // verify that this value is on the stack
1032 [ - + ]: 119962 : assert(si->kind == STACK_VALUE);
1033 : : }
1034 : :
1035 : : // Second, convert any non-VTYPE_PYOBJ to that type.
1036 [ + + ]: 181130 : for (mp_uint_t i = 0; i < n_pop; i++) {
1037 : 119962 : stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1038 [ + + ]: 119962 : if (si->vtype != VTYPE_PYOBJ) {
1039 : 544 : mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
1040 : 544 : emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
1041 : 544 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
1042 : 544 : emit_native_mov_state_reg(emit, local_num, REG_RET);
1043 : 544 : si->vtype = VTYPE_PYOBJ;
1044 : 119962 : DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
1045 : : }
1046 : : }
1047 : :
1048 : : // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
1049 : 61168 : adjust_stack(emit, -n_pop);
1050 : 61168 : emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1051 : 61168 : }
1052 : :
1053 : : // vtype of all n_push objects is VTYPE_PYOBJ
1054 : 15492 : STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
1055 : 15492 : need_reg_all(emit);
1056 : 15492 : ensure_extra_stack(emit, n_push);
1057 [ + + ]: 49014 : for (mp_uint_t i = 0; i < n_push; i++) {
1058 : 33522 : emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
1059 : 33522 : emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
1060 : : }
1061 : 15492 : emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1062 : 15492 : adjust_stack(emit, n_push);
1063 : 15492 : }
1064 : :
1065 : 5445 : STATIC void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
1066 [ + + ]: 5445 : if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
1067 : 6 : size_t new_alloc = emit->exc_stack_alloc + 4;
1068 : 6 : emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
1069 : 6 : emit->exc_stack_alloc = new_alloc;
1070 : : }
1071 : :
1072 : 5445 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
1073 : 5445 : e->label = label;
1074 : 5445 : e->is_finally = is_finally;
1075 : 5445 : e->unwind_label = UNWIND_LABEL_UNUSED;
1076 : 5445 : e->is_active = true;
1077 : :
1078 : 5445 : ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
1079 : 5445 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1080 : 5445 : }
1081 : :
1082 : 10026 : STATIC void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
1083 [ - + ]: 10026 : assert(emit->exc_stack_size > 0);
1084 : :
1085 : : // Get current exception handler and deactivate it
1086 : 10026 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1087 : 10026 : e->is_active = false;
1088 : :
1089 : : // Find next innermost active exception handler, to restore as current handler
1090 [ + + + + ]: 10686 : for (--e; e >= emit->exc_stack && !e->is_active; --e) {
1091 : 660 : }
1092 : :
1093 : : // Update the PC of the new exception handler
1094 [ + + ]: 10026 : if (e < emit->exc_stack) {
1095 : : // No active handler, clear handler PC to zero
1096 [ + + ]: 9519 : if (start_of_handler) {
1097 : : // Optimisation: PC is already cleared by global exc handler
1098 : : return;
1099 : : }
1100 : 5112 : ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
1101 : : } else {
1102 : : // Found new active handler, get its PC
1103 : 507 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
1104 : : }
1105 : 5619 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1106 : : }
1107 : :
1108 : 5445 : STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
1109 [ - + ]: 5445 : assert(emit->exc_stack_size > 0);
1110 : 5445 : exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
1111 [ - + ]: 5445 : assert(e->is_active == false);
1112 : 5445 : return e;
1113 : : }
1114 : :
1115 : 10596 : STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
1116 : 10596 : emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
1117 : 10596 : size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
1118 : 10596 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
1119 : 10596 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
1120 : 10596 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
1121 : 10596 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
1122 : 10596 : }
1123 : :
1124 : 7761 : STATIC void emit_load_reg_with_child(emit_t *emit, int reg, mp_raw_code_t *rc) {
1125 [ + + ]: 7761 : size_t table_off = mp_emit_common_alloc_const_child(emit->emit_common, rc);
1126 : 7761 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
1127 : 7761 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CHILD_TABLE);
1128 : 7761 : ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
1129 : 7761 : }
1130 : :
1131 : 65392 : STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
1132 : 65392 : DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
1133 : :
1134 : 65392 : bool is_finally = false;
1135 [ + + ]: 65392 : if (emit->exc_stack_size > 0) {
1136 : 12960 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1137 [ + + + + ]: 12960 : is_finally = e->is_finally && e->label == l;
1138 : : }
1139 : :
1140 : 864 : if (is_finally) {
1141 : : // Label is at start of finally handler: store TOS into exception slot
1142 : 864 : vtype_kind_t vtype;
1143 : 864 : emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1144 : 864 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
1145 : : }
1146 : :
1147 : 65392 : emit_native_pre(emit);
1148 : : // need to commit stack because we can jump here from elsewhere
1149 : 65392 : need_stack_settled(emit);
1150 : 65392 : mp_asm_base_label_assign(&emit->as->base, l);
1151 : 65392 : emit_post(emit);
1152 : :
1153 [ + + ]: 65392 : if (is_finally) {
1154 : : // Label is at start of finally handler: pop exception stack
1155 : 864 : emit_native_leave_exc_stack(emit, false);
1156 : : }
1157 : 65392 : }
1158 : :
1159 : 12021 : STATIC void emit_native_global_exc_entry(emit_t *emit) {
1160 : : // Note: 4 labels are reserved for this function, starting at *emit->label_slot
1161 : :
1162 : 12021 : emit->exit_label = *emit->label_slot;
1163 : :
1164 [ + + + + ]: 12021 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1165 : 8340 : mp_uint_t nlr_label = *emit->label_slot + 1;
1166 : 8340 : mp_uint_t start_label = *emit->label_slot + 2;
1167 : 8340 : mp_uint_t global_except_label = *emit->label_slot + 3;
1168 : :
1169 [ + + ]: 8340 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1170 : : // Set new globals
1171 : 7624 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
1172 : 7624 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
1173 : 7624 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_MODULE_CONTEXT_GLOBALS);
1174 : 7624 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1175 : :
1176 : : // Save old globals (or NULL if globals didn't change)
1177 : 7624 : emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
1178 : : }
1179 : :
1180 [ + + ]: 8340 : if (emit->scope->exc_stack_size == 0) {
1181 [ + + ]: 6132 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1182 : : // Optimisation: if globals didn't change don't push the nlr context
1183 : 5575 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
1184 : : }
1185 : :
1186 : : // Wrap everything in an nlr context
1187 : 6132 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1188 : 6132 : emit_call(emit, MP_F_NLR_PUSH);
1189 : : #if N_NLR_SETJMP
1190 : : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1191 : : emit_call(emit, MP_F_SETJMP);
1192 : : #endif
1193 : 6132 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
1194 : : } else {
1195 : : // Clear the unwind state
1196 : 2208 : ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1197 : 2208 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
1198 : :
1199 : : // Put PC of start code block into REG_LOCAL_1
1200 : 2208 : ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
1201 : :
1202 : : // Wrap everything in an nlr context
1203 : 2208 : emit_native_label_assign(emit, nlr_label);
1204 : 2208 : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1205 : 2208 : emit_call(emit, MP_F_NLR_PUSH);
1206 : : #if N_NLR_SETJMP
1207 : : ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1208 : : emit_call(emit, MP_F_SETJMP);
1209 : : #endif
1210 : 2208 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
1211 : :
1212 : :                 // Clear the stored exception handler PC, and jump to the code in REG_LOCAL_1 to resume execution
1213 : 2208 : ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1214 : 2208 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
1215 : 2208 : ASM_JUMP_REG(emit->as, REG_LOCAL_1);
1216 : :
1217 : : // Global exception handler: check for valid exception handler
1218 : 2208 : emit_native_label_assign(emit, global_except_label);
1219 : 2208 : ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
1220 : 2208 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
1221 : : }
1222 : :
1223 [ + + ]: 8340 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1224 : : // Restore old globals
1225 : 7624 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1226 : 7624 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1227 : : }
1228 : :
1229 [ + + ]: 8340 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1230 : : // Store return value in state[0]
1231 : 716 : ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
1232 : 716 : ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
1233 : :
1234 : : // Load return kind
1235 : 716 : ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
1236 : :
1237 : 716 : ASM_EXIT(emit->as);
1238 : : } else {
1239 : : // Re-raise exception out to caller
1240 : 7624 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1241 : 7624 : emit_call(emit, MP_F_NATIVE_RAISE);
1242 : : }
1243 : :
1244 : : // Label for start of function
1245 : 8340 : emit_native_label_assign(emit, start_label);
1246 : :
1247 [ + + ]: 8340 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1248 : 716 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
1249 : 716 : ASM_JUMP_REG(emit->as, REG_TEMP0);
1250 : 716 : emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
1251 : :
1252 : : // This is the first entry of the generator
1253 : :
1254 : : // Check LOCAL_IDX_EXC_VAL for any injected value
1255 : 716 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1256 : 716 : emit_call(emit, MP_F_NATIVE_RAISE);
1257 : : }
1258 : : }
1259 : 12021 : }
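: : // As a rough C sketch (an approximation, not the exact generated code),
: : // the prologue above for a non-generator with no inner handlers is:
: : //
: : //     old_globals = mp_native_swap_globals(fun->context->globals);
: : //     if (old_globals != NULL) {            // globals changed: need nlr
: : //         if (nlr_push(&nlr) != 0) {        // an exception arrived here
: : //             mp_native_swap_globals(old_globals);
: : //             mp_native_raise(nlr.ret_val); // re-raise to the caller
: : //         }
: : //     }
: : //     start: /* function body */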
1260 : :
1261 : 12005 : STATIC void emit_native_global_exc_exit(emit_t *emit) {
1262 : : // Label for end of function
1263 : 12005 : emit_native_label_assign(emit, emit->exit_label);
1264 : :
1265 [ + + + + ]: 12005 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1266 : : // Get old globals
1267 [ + + ]: 8328 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1268 : 7620 : emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1269 : :
1270 [ + + ]: 7620 : if (emit->scope->exc_stack_size == 0) {
1271 : : // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
1272 : 5571 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
1273 : : }
1274 : :
1275 : : // Restore old globals
1276 : 7620 : emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1277 : : }
1278 : :
1279 : : // Pop the nlr context
1280 : 8328 : emit_call(emit, MP_F_NLR_POP);
1281 : :
1282 [ + + ]: 8328 : if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1283 [ + + ]: 7620 : if (emit->scope->exc_stack_size == 0) {
1284 : : // Destination label for above optimisation
1285 : 5571 : emit_native_label_assign(emit, emit->exit_label + 1);
1286 : : }
1287 : : }
1288 : :
1289 : : // Load return value
1290 : 8328 : ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
1291 : : }
1292 : :
1293 : 12005 : ASM_EXIT(emit->as);
1294 : 12005 : }
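: : // And the matching epilogue for the same simple case, in the same hedged
: : // pseudo-C:
: : //
: : //     exit:                                 // emit->exit_label
: : //     if (old_globals != NULL) {            // skipped when not swapped
: : //         mp_native_swap_globals(old_globals);
: : //         nlr_pop();
: : //     }
: : //     return ret_val;                       // LOCAL_IDX_RET_VAL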
1295 : :
1296 : 2085 : STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
1297 : 2085 : DEBUG_printf("import_name %s\n", qstr_str(qst));
1298 : :
1299 : : // get arguments from stack: arg2 = fromlist, arg3 = level
1300 : : // If using viper types these arguments must be converted to proper objects, and
1301 : : // to accomplish this viper types are turned off for the emit_pre_pop_reg_reg call.
1302 : 2085 : bool orig_do_viper_types = emit->do_viper_types;
1303 : 2085 : emit->do_viper_types = false;
1304 : 2085 : vtype_kind_t vtype_fromlist;
1305 : 2085 : vtype_kind_t vtype_level;
1306 : 2085 : emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
1307 [ - + ]: 2085 : assert(vtype_fromlist == VTYPE_PYOBJ);
1308 [ - + ]: 2085 : assert(vtype_level == VTYPE_PYOBJ);
1309 : 2085 : emit->do_viper_types = orig_do_viper_types;
1310 : :
1311 : 2085 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
1312 : 2085 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1313 : 2085 : }
1314 : :
1315 : 831 : STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
1316 : 831 : DEBUG_printf("import_from %s\n", qstr_str(qst));
1317 : 831 : emit_native_pre(emit);
1318 : 831 : vtype_kind_t vtype_module;
1319 : 831 : emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
1320 [ - + ]: 831 : assert(vtype_module == VTYPE_PYOBJ);
1321 : 831 : emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
1322 : 831 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1323 : 831 : }
1324 : :
1325 : 102 : STATIC void emit_native_import_star(emit_t *emit) {
1326 : 102 : DEBUG_printf("import_star\n");
1327 : 102 : vtype_kind_t vtype_module;
1328 : 102 : emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
1329 [ - + ]: 102 : assert(vtype_module == VTYPE_PYOBJ);
1330 : 102 : emit_call(emit, MP_F_IMPORT_ALL);
1331 : 102 : emit_post(emit);
1332 : 102 : }
1333 : :
1334 : 3018 : STATIC void emit_native_import(emit_t *emit, qstr qst, int kind) {
1335 [ + + ]: 3018 : if (kind == MP_EMIT_IMPORT_NAME) {
1336 : 2085 : emit_native_import_name(emit, qst);
1337 [ + + ]: 933 : } else if (kind == MP_EMIT_IMPORT_FROM) {
1338 : 831 : emit_native_import_from(emit, qst);
1339 : : } else {
1340 : 102 : emit_native_import_star(emit);
1341 : : }
1342 : 3018 : }
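: : // Illustrative mapping from source to the three kinds (example only):
: : //
: : //     import foo.bar       ->  IMPORT_NAME
: : //     from foo import bar  ->  IMPORT_NAME, then IMPORT_FROM
: : //     from foo import *    ->  IMPORT_NAME, then IMPORT_STAR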
1343 : :
1344 : 20367 : STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
1345 : 20367 : DEBUG_printf("load_const_tok(tok=%u)\n", tok);
1346 [ + + ]: 20367 : if (tok == MP_TOKEN_ELLIPSIS) {
1347 : 12 : emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
1348 : : } else {
1349 : 20355 : emit_native_pre(emit);
1350 [ + + ]: 20355 : if (tok == MP_TOKEN_KW_NONE) {
1351 : 18708 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
1352 : : } else {
1353 : 1647 : emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
1354 : : }
1355 : : }
1356 : 20367 : }
1357 : :
1358 : 33243 : STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
1359 : 33243 : DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
1360 : 33243 : emit_native_pre(emit);
1361 : 33243 : emit_post_push_imm(emit, VTYPE_INT, arg);
1362 : 33243 : }
1363 : :
1364 : 22251 : STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
1365 : 22251 : emit_native_pre(emit);
1366 : : // TODO: Eventually we want to be able to work with raw pointers in viper to
1367 : : // do native array access. For now we just load them as any other object.
1368 : : /*
1369 : : if (emit->do_viper_types) {
1370 : : // load a pointer to the asciiz string?
1371 : : emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
1372 : : } else
1373 : : */
1374 : : {
1375 : 22251 : need_reg_single(emit, REG_TEMP0, 0);
1376 : 22251 : emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
1377 : 22251 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1378 : : }
1379 : 22251 : }
1380 : :
1381 : 10596 : STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
1382 : 10596 : emit_native_pre(emit);
1383 : 10596 : need_reg_single(emit, REG_RET, 0);
1384 : 10596 : emit_load_reg_with_object(emit, REG_RET, obj);
1385 : 10596 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1386 : 10596 : }
1387 : :
1388 : 762 : STATIC void emit_native_load_null(emit_t *emit) {
1389 : 762 : emit_native_pre(emit);
1390 : 762 : emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
1391 : 762 : }
1392 : :
1393 : 22822 : STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1394 : 22822 : DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1395 : 22822 : vtype_kind_t vtype = emit->local_vtype[local_num];
1396 [ + + ]: 22822 : if (vtype == VTYPE_UNBOUND) {
1397 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
1398 : : }
1399 : 22822 : emit_native_pre(emit);
1400 [ + + + + + + ]: 22822 :     if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
1401 : 13450 : emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
1402 : : } else {
1403 : 9372 : need_reg_single(emit, REG_TEMP0, 0);
1404 : 9372 : emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
1405 : 9372 : emit_post_push_reg(emit, vtype, REG_TEMP0);
1406 : : }
1407 : 22822 : }
1408 : :
1409 : 312 : STATIC void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1410 : 312 : DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1411 : 312 : need_reg_single(emit, REG_RET, 0);
1412 : 312 : emit_native_load_fast(emit, qst, local_num);
1413 : 312 : vtype_kind_t vtype;
1414 : 312 : int reg_base = REG_RET;
1415 : 312 : emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1416 : 312 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1417 : :     // closed-over vars are always Python objects
1418 : 312 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1419 : 312 : }
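: : // The word-offset-1 load above reads the payload of a cell object;
: : // assuming the usual mp_obj_cell_t layout this is roughly:
: : //
: : //     typedef struct { mp_obj_base_t base; mp_obj_t obj; } mp_obj_cell_t;
: : //     value = ((mp_obj_cell_t *)cell)->obj;  // one word past the base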
1420 : :
1421 : 22621 : STATIC void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1422 [ + + ]: 22621 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1423 : 22309 : emit_native_load_fast(emit, qst, local_num);
1424 : : } else {
1425 : 312 : emit_native_load_deref(emit, qst, local_num);
1426 : : }
1427 : 22621 : }
1428 : :
1429 : 75525 : STATIC void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
1430 : 75525 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
1431 : 75525 : MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
1432 : 75525 : emit_native_pre(emit);
1433 [ + + ]: 75525 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1434 : : DEBUG_printf("load_name(%s)\n", qstr_str(qst));
1435 : : } else {
1436 : 14328 : DEBUG_printf("load_global(%s)\n", qstr_str(qst));
1437 [ + + ]: 14328 : if (emit->do_viper_types) {
1438 : : // check for builtin casting operators
1439 : 594 : int native_type = mp_native_type_from_qstr(qst);
1440 [ + + ]: 594 : if (native_type >= MP_NATIVE_TYPE_BOOL) {
1441 : 100 : emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
1442 : 100 : return;
1443 : : }
1444 : : }
1445 : : }
1446 : 75425 : emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
1447 : 75425 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1448 : : }
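: : // Example of the viper cast special case above (illustrative only): in a
: : // @micropython.viper function, names recognised by
: : // mp_native_type_from_qstr (such as int, uint and the ptr8/ptr16/ptr32
: : // casts) are pushed as VTYPE_BUILTIN_CAST immediates instead of being
: : // looked up as globals:
: : //
: : //     @micropython.viper
: : //     def f(buf) -> int:
: : //         p = ptr8(buf)     # 'ptr8' becomes a cast, not a global lookup
: : //         return int(p[0])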
1449 : :
1450 : 7638 : STATIC void emit_native_load_attr(emit_t *emit, qstr qst) {
1451 : : // depends on type of subject:
1452 : : // - integer, function, pointer to integers: error
1453 : : // - pointer to structure: get member, quite easy
1454 : : // - Python object: call mp_load_attr, and needs to be typed to convert result
1455 : 7638 : vtype_kind_t vtype_base;
1456 : 7638 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1457 [ - + ]: 7638 : assert(vtype_base == VTYPE_PYOBJ);
1458 : 7638 : emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1459 : 7638 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1460 : 7638 : }
1461 : :
1462 : 13449 : STATIC void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
1463 [ + + ]: 13449 : if (is_super) {
1464 : 42 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
1465 : 42 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
1466 : 42 : emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
1467 : : } else {
1468 : 13407 : vtype_kind_t vtype_base;
1469 : 13407 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1470 [ - + ]: 13407 : assert(vtype_base == VTYPE_PYOBJ);
1471 : 13407 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1472 : 13407 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
1473 : : }
1474 : 13449 : }
1475 : :
1476 : 1338 : STATIC void emit_native_load_build_class(emit_t *emit) {
1477 : 1338 : emit_native_pre(emit);
1478 : 1338 : emit_call(emit, MP_F_LOAD_BUILD_CLASS);
1479 : 1338 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1480 : 1338 : }
1481 : :
1482 : 3485 : STATIC void emit_native_load_subscr(emit_t *emit) {
1483 : 3485 : DEBUG_printf("load_subscr\n");
1484 : : // need to compile: base[index]
1485 : :
1486 : : // pop: index, base
1487 : : // optimise case where index is an immediate
1488 : 3485 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1489 : :
1490 [ + + ]: 3485 : if (vtype_base == VTYPE_PYOBJ) {
1491 : : // standard Python subscr
1492 : : // TODO factor this implicit cast code with other uses of it
1493 : 3339 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1494 [ + + ]: 3339 : if (vtype_index == VTYPE_PYOBJ) {
1495 : 3333 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1496 : : } else {
1497 : 6 : emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1498 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
1499 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1500 : : }
1501 : 3339 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1502 : 3339 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
1503 : 3339 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1504 : : } else {
1505 : : // viper load
1506 : : // TODO The different machine architectures have very different
1507 : : // capabilities and requirements for loads, so probably best to
1508 : : // write a completely separate load-optimiser for each one.
1509 : 146 : stack_info_t *top = peek_stack(emit, 0);
1510 [ + + ]: 146 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1511 : : // index is an immediate
1512 : 70 : mp_int_t index_value = top->data.u_imm;
1513 : 70 : emit_pre_pop_discard(emit); // discard index
1514 : 70 : int reg_base = REG_ARG_1;
1515 : 70 : int reg_index = REG_ARG_2;
1516 : 70 : emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_index);
1517 : 70 : need_reg_single(emit, REG_RET, 0);
1518 [ + + + + ]: 70 : switch (vtype_base) {
1519 : 42 : case VTYPE_PTR8: {
1520 : : // pointer to 8-bit memory
1521 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1522 [ + + ]: 42 : if (index_value != 0) {
1523 : : // index is non-zero
1524 : : #if N_THUMB
1525 : : if (index_value > 0 && index_value < 32) {
1526 : : asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1527 : : break;
1528 : : }
1529 : : #endif
1530 : 30 : need_reg_single(emit, reg_index, 0);
1531 : 30 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1532 : 30 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1533 : 30 : reg_base = reg_index;
1534 : : }
1535 : 42 : ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
1536 : 42 : break;
1537 : : }
1538 : 12 : case VTYPE_PTR16: {
1539 : : // pointer to 16-bit memory
1540 [ + + ]: 12 : if (index_value != 0) {
1541 : : // index is a non-zero immediate
1542 : : #if N_THUMB
1543 : : if (index_value > 0 && index_value < 32) {
1544 : : asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1545 : : break;
1546 : : }
1547 : : #endif
1548 : 6 : need_reg_single(emit, reg_index, 0);
1549 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1550 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1551 : 6 : reg_base = reg_index;
1552 : : }
1553 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
1554 : 12 : break;
1555 : : }
1556 : 12 : case VTYPE_PTR32: {
1557 : : // pointer to 32-bit memory
1558 [ + + ]: 12 : if (index_value != 0) {
1559 : : // index is a non-zero immediate
1560 : : #if N_THUMB
1561 : : if (index_value > 0 && index_value < 32) {
1562 : : asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1563 : : break;
1564 : : }
1565 : : #endif
1566 : 6 : need_reg_single(emit, reg_index, 0);
1567 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1568 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1569 : 6 : reg_base = reg_index;
1570 : : }
1571 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
1572 : 12 : break;
1573 : : }
1574 : 4 : default:
1575 : 70 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1576 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1577 : : }
1578 : : } else {
1579 : : // index is not an immediate
1580 : 76 : vtype_kind_t vtype_index;
1581 : 76 : int reg_index = REG_ARG_2;
1582 : 76 : emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, REG_ARG_1);
1583 : 76 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1584 : 76 : need_reg_single(emit, REG_RET, 0);
1585 [ + + ]: 76 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1586 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1587 : : MP_ERROR_TEXT("can't load with '%q' index"), vtype_to_qstr(vtype_index));
1588 : : }
1589 [ + + + + ]: 76 : switch (vtype_base) {
1590 : 48 : case VTYPE_PTR8: {
1591 : : // pointer to 8-bit memory
1592 : : // TODO optimise to use thumb ldrb r1, [r2, r3]
1593 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1594 : 48 :                     ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
1595 : 48 : break;
1596 : : }
1597 : 12 : case VTYPE_PTR16: {
1598 : : // pointer to 16-bit memory
1599 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1600 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1601 : 12 : ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
1602 : 12 : break;
1603 : : }
1604 : 12 : case VTYPE_PTR32: {
1605 : :                     // pointer to 32-bit memory
1606 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1607 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1608 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1609 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1610 : 12 : ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
1611 : 12 : break;
1612 : : }
1613 : 4 : default:
1614 : 76 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1615 : : MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1616 : : }
1617 : : }
1618 : 146 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1619 : : }
1620 : 3485 : }
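: : // Illustrative viper source (not from this file) hitting the typed-load
: : // paths above:
: : //
: : //     @micropython.viper
: : //     def get(buf) -> int:
: : //         p = ptr16(buf)
: : //         return int(p[3])  # immediate index: base + (3 << 1), 16-bit load
: : //
: : // Note the non-immediate case has no scaled-addressing helper here: the
: : // index register is added to the base once per byte of element size
: : // (twice for ptr16, four times for ptr32) before the plain load.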
1621 : :
1622 : 5509 : STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1623 : 5509 : vtype_kind_t vtype;
1624 [ + + + + + + ]: 5509 :     if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
1625 : 1159 : emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
1626 : : } else {
1627 : 4350 : emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1628 : 4350 : emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
1629 : : }
1630 : 5509 : emit_post(emit);
1631 : :
1632 : : // check types
1633 [ + + ]: 5509 : if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1634 : :         // first time this local is assigned, so give it the type of the object stored in it
1635 : 312 : emit->local_vtype[local_num] = vtype;
1636 [ + + ]: 5197 : } else if (emit->local_vtype[local_num] != vtype) {
1637 : :         // type of local is not the same as that of the object stored in it
1638 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1639 : : MP_ERROR_TEXT("local '%q' has type '%q' but source is '%q'"),
1640 : : qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
1641 : : }
1642 : 5509 : }
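: : // The typing rule above, illustrated (example only): in viper code a
: : // local's type is fixed by its first assignment,
: : //
: : //     @micropython.viper
: : //     def f(buf):
: : //         x = 1          # first store: x becomes 'int'
: : //         x = 2          # ok, still 'int'
: : //         x = ptr8(buf)  # error: local 'x' has type 'int' but source is 'ptr8'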
1643 : :
1644 : 201 : STATIC void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1645 : 201 : DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1646 : 201 : need_reg_single(emit, REG_TEMP0, 0);
1647 : 201 : need_reg_single(emit, REG_TEMP1, 0);
1648 : 201 : emit_native_load_fast(emit, qst, local_num);
1649 : 201 : vtype_kind_t vtype;
1650 : 201 : int reg_base = REG_TEMP0;
1651 : 201 : emit_pre_pop_reg_flexible(emit, &vtype, ®_base, -1, -1);
1652 : 201 : int reg_src = REG_TEMP1;
1653 : 201 : emit_pre_pop_reg_flexible(emit, &vtype, ®_src, reg_base, reg_base);
1654 : 201 : ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
1655 : 201 : emit_post(emit);
1656 : 201 : }
1657 : :
1658 : 5500 : STATIC void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1659 [ + + ]: 5500 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1660 : 5299 : emit_native_store_fast(emit, qst, local_num);
1661 : : } else {
1662 : 201 : emit_native_store_deref(emit, qst, local_num);
1663 : : }
1664 : 5500 : }
1665 : :
1666 : 20436 : STATIC void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
1667 : 20436 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
1668 : 20436 : MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
1669 [ + + ]: 20436 : if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1670 : : // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
1671 : 19950 : vtype_kind_t vtype;
1672 : 19950 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1673 [ - + ]: 19950 : assert(vtype == VTYPE_PYOBJ);
1674 : : } else {
1675 : 486 : vtype_kind_t vtype = peek_vtype(emit, 0);
1676 [ + + ]: 486 : if (vtype == VTYPE_PYOBJ) {
1677 : 480 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1678 : : } else {
1679 : 6 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1680 : 6 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
1681 : 6 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1682 : : }
1683 : : }
1684 : 20436 : emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
1685 : 20436 : emit_post(emit);
1686 : 20436 : }
1687 : :
1688 : 1842 : STATIC void emit_native_store_attr(emit_t *emit, qstr qst) {
1689 : 1842 : vtype_kind_t vtype_base;
1690 : 1842 : vtype_kind_t vtype_val = peek_vtype(emit, 1);
1691 [ + + ]: 1842 : if (vtype_val == VTYPE_PYOBJ) {
1692 : 1830 : emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
1693 : : } else {
1694 : 12 : emit_access_stack(emit, 2, &vtype_val, REG_ARG_1); // arg1 = value
1695 : 12 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_val, REG_ARG_2); // arg2 = type
1696 : 12 : ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_RET); // arg3 = value (converted)
1697 : 12 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1698 : 12 : adjust_stack(emit, -1); // pop value
1699 : : }
1700 [ - + ]: 1842 : assert(vtype_base == VTYPE_PYOBJ);
1701 : 1842 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1702 : 1842 : emit_post(emit);
1703 : 1842 : }
1704 : :
1705 : 1307 : STATIC void emit_native_store_subscr(emit_t *emit) {
1706 : 1307 : DEBUG_printf("store_subscr\n");
1707 : : // need to compile: base[index] = value
1708 : :
1709 : : // pop: index, base, value
1710 : : // optimise case where index is an immediate
1711 : 1307 : vtype_kind_t vtype_base = peek_vtype(emit, 1);
1712 : :
1713 [ + + ]: 1307 : if (vtype_base == VTYPE_PYOBJ) {
1714 : : // standard Python subscr
1715 : 1179 : vtype_kind_t vtype_index = peek_vtype(emit, 0);
1716 : 1179 : vtype_kind_t vtype_value = peek_vtype(emit, 2);
1717 [ + + ]: 1179 : if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
1718 : : // need to implicitly convert non-objects to objects
1719 : : // TODO do this properly
1720 : 6 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1721 : 6 : adjust_stack(emit, 3);
1722 : : }
1723 : 1179 : emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
1724 : 1179 : emit_call(emit, MP_F_OBJ_SUBSCR);
1725 : : } else {
1726 : : // viper store
1727 : : // TODO The different machine architectures have very different
1728 : : // capabilities and requirements for stores, so probably best to
1729 : : // write a completely separate store-optimiser for each one.
1730 : 128 : stack_info_t *top = peek_stack(emit, 0);
1731 [ + + ]: 128 : if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1732 : : // index is an immediate
1733 : 48 : mp_int_t index_value = top->data.u_imm;
1734 : 48 : emit_pre_pop_discard(emit); // discard index
1735 : 48 : vtype_kind_t vtype_value;
1736 : 48 : int reg_base = REG_ARG_1;
1737 : 48 : int reg_index = REG_ARG_2;
1738 : 48 : int reg_value = REG_ARG_3;
1739 : 48 : emit_pre_pop_reg_flexible(emit, &vtype_base, ®_base, reg_index, reg_value);
1740 : : #if N_X64 || N_X86
1741 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1742 : 48 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1743 : : #else
1744 : : emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, reg_base, reg_index);
1745 : : #endif
1746 [ + + ]: 48 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1747 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1748 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1749 : : }
1750 [ + + + + ]: 48 : switch (vtype_base) {
1751 : 12 : case VTYPE_PTR8: {
1752 : : // pointer to 8-bit memory
1753 : : // TODO optimise to use thumb strb r1, [r2, r3]
1754 [ + + ]: 12 : if (index_value != 0) {
1755 : : // index is non-zero
1756 : : #if N_THUMB
1757 : : if (index_value > 0 && index_value < 32) {
1758 : : asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1759 : : break;
1760 : : }
1761 : : #endif
1762 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1763 : : #if N_ARM
1764 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1765 : : return;
1766 : : #endif
1767 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1768 : 6 : reg_base = reg_index;
1769 : : }
1770 : 12 : ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
1771 : 12 : break;
1772 : : }
1773 : 12 : case VTYPE_PTR16: {
1774 : : // pointer to 16-bit memory
1775 [ + + ]: 12 : if (index_value != 0) {
1776 : : // index is a non-zero immediate
1777 : : #if N_THUMB
1778 : : if (index_value > 0 && index_value < 32) {
1779 : : asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1780 : : break;
1781 : : }
1782 : : #endif
1783 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1784 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1785 : 6 : reg_base = reg_index;
1786 : : }
1787 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
1788 : 12 : break;
1789 : : }
1790 : 16 : case VTYPE_PTR32: {
1791 : : // pointer to 32-bit memory
1792 [ + + ]: 16 : if (index_value != 0) {
1793 : : // index is a non-zero immediate
1794 : : #if N_THUMB
1795 : : if (index_value > 0 && index_value < 32) {
1796 : : asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1797 : : break;
1798 : : }
1799 : : #endif
1800 : : #if N_ARM
1801 : : ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1802 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1803 : : return;
1804 : : #endif
1805 : 6 : ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1806 : 6 : ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1807 : 6 : reg_base = reg_index;
1808 : : }
1809 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
1810 : 16 : break;
1811 : : }
1812 : 8 : default:
1813 : 48 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1814 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1815 : : }
1816 : : } else {
1817 : : // index is not an immediate
1818 : 80 : vtype_kind_t vtype_index, vtype_value;
1819 : 80 : int reg_index = REG_ARG_2;
1820 : 80 : int reg_value = REG_ARG_3;
1821 : 80 : emit_pre_pop_reg_flexible(emit, &vtype_index, ®_index, REG_ARG_1, reg_value);
1822 : 80 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1823 [ + + ]: 80 : if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1824 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1825 : : MP_ERROR_TEXT("can't store with '%q' index"), vtype_to_qstr(vtype_index));
1826 : : }
1827 : : #if N_X64 || N_X86
1828 : : // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1829 : 80 : emit_pre_pop_reg(emit, &vtype_value, reg_value);
1830 : : #else
1831 : : emit_pre_pop_reg_flexible(emit, &vtype_value, ®_value, REG_ARG_1, reg_index);
1832 : : #endif
1833 [ + + ]: 80 : if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1834 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1835 : : MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1836 : : }
1837 [ + + + + ]: 80 : switch (vtype_base) {
1838 : 48 : case VTYPE_PTR8: {
1839 : : // pointer to 8-bit memory
1840 : : // TODO optimise to use thumb strb r1, [r2, r3]
1841 : : #if N_ARM
1842 : : asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1843 : : break;
1844 : : #endif
1845 : 48 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1846 : 48 : ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
1847 : 48 : break;
1848 : : }
1849 : 12 : case VTYPE_PTR16: {
1850 : : // pointer to 16-bit memory
1851 : : #if N_ARM
1852 : : asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1853 : : break;
1854 : : #endif
1855 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1856 : 12 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1857 : 12 : ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
1858 : 12 : break;
1859 : : }
1860 : 16 : case VTYPE_PTR32: {
1861 : : // pointer to 32-bit memory
1862 : : #if N_ARM
1863 : : asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1864 : : break;
1865 : : #endif
1866 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1867 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1868 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1869 : 16 : ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1870 : 16 : ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
1871 : 16 : break;
1872 : : }
1873 : 4 : default:
1874 : 80 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1875 : : MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1876 : : }
1877 : : }
1878 : :
1879 : : }
1880 : 1307 : }
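: : // And the store counterpart (illustrative only):
: : //
: : //     @micropython.viper
: : //     def put(buf, i: int, v: int):
: : //         p = ptr32(buf)
: : //         p[0] = v   # immediate index: direct 32-bit store
: : //         p[i] = v   # computed index: base + 4*i via repeated adds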
1881 : :
1882 : 210 : STATIC void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1883 [ + - ]: 210 : if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1884 : :         // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
1885 : :         // to mark deleted vars, but then every var would need to be checked on
1886 : :         // each access, which is very inefficient. So just set the value to None to let the GC reclaim it.
1887 : 210 : emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
1888 : 210 : emit_native_store_fast(emit, qst, local_num);
1889 : : } else {
1890 : : // TODO implement me!
1891 : 210 : }
1892 : 210 : }
1893 : :
1894 : 327 : STATIC void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
1895 : 327 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
1896 : 327 : MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
1897 : 327 : emit_native_pre(emit);
1898 : 327 : emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
1899 : 327 : emit_post(emit);
1900 : 327 : }
1901 : :
1902 : 45 : STATIC void emit_native_delete_attr(emit_t *emit, qstr qst) {
1903 : 45 : vtype_kind_t vtype_base;
1904 : 45 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1905 [ - + ]: 45 : assert(vtype_base == VTYPE_PYOBJ);
1906 : 45 : ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
1907 : 45 : emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1908 : 45 : emit_post(emit);
1909 : 45 : }
1910 : :
1911 : 159 : STATIC void emit_native_delete_subscr(emit_t *emit) {
1912 : 159 : vtype_kind_t vtype_index, vtype_base;
1913 : 159 : emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
1914 [ - + ]: 159 : assert(vtype_index == VTYPE_PYOBJ);
1915 [ - + ]: 159 : assert(vtype_base == VTYPE_PYOBJ);
1916 : 159 : emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
1917 : 159 : }
1918 : :
1919 : 4951 : STATIC void emit_native_subscr(emit_t *emit, int kind) {
1920 [ + + ]: 4951 : if (kind == MP_EMIT_SUBSCR_LOAD) {
1921 : 3485 : emit_native_load_subscr(emit);
1922 [ + + ]: 1466 : } else if (kind == MP_EMIT_SUBSCR_STORE) {
1923 : 1307 : emit_native_store_subscr(emit);
1924 : : } else {
1925 : 159 : emit_native_delete_subscr(emit);
1926 : : }
1927 : 4951 : }
1928 : :
1929 : 9525 : STATIC void emit_native_attr(emit_t *emit, qstr qst, int kind) {
1930 [ + + ]: 9525 : if (kind == MP_EMIT_ATTR_LOAD) {
1931 : 7638 : emit_native_load_attr(emit, qst);
1932 [ + + ]: 1887 : } else if (kind == MP_EMIT_ATTR_STORE) {
1933 : 1842 : emit_native_store_attr(emit, qst);
1934 : : } else {
1935 : 45 : emit_native_delete_attr(emit, qst);
1936 : : }
1937 : 9525 : }
1938 : :
1939 : 5547 : STATIC void emit_native_dup_top(emit_t *emit) {
1940 : 5547 : DEBUG_printf("dup_top\n");
1941 : 5547 : vtype_kind_t vtype;
1942 : 5547 : int reg = REG_TEMP0;
1943 : 5547 : emit_pre_pop_reg_flexible(emit, &vtype, ®, -1, -1);
1944 : 5547 : emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
1945 : 5547 : }
1946 : :
1947 : 417 : STATIC void emit_native_dup_top_two(emit_t *emit) {
1948 : 417 : vtype_kind_t vtype0, vtype1;
1949 : 417 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1950 : 417 : emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
1951 : 417 : }
1952 : :
1953 : 38837 : STATIC void emit_native_pop_top(emit_t *emit) {
1954 : 38837 : DEBUG_printf("pop_top\n");
1955 : 38837 : emit_pre_pop_discard(emit);
1956 : 38837 : emit_post(emit);
1957 : 38837 : }
1958 : :
1959 : 567 : STATIC void emit_native_rot_two(emit_t *emit) {
1960 : 567 : DEBUG_printf("rot_two\n");
1961 : 567 : vtype_kind_t vtype0, vtype1;
1962 : 567 : emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1963 : 567 : emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
1964 : 567 : }
1965 : :
1966 : 126 : STATIC void emit_native_rot_three(emit_t *emit) {
1967 : 126 : DEBUG_printf("rot_three\n");
1968 : 126 : vtype_kind_t vtype0, vtype1, vtype2;
1969 : 126 : emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
1970 : 126 : emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
1971 : 126 : }
1972 : :
1973 : 28743 : STATIC void emit_native_jump(emit_t *emit, mp_uint_t label) {
1974 : 28743 : DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
1975 : 28743 : emit_native_pre(emit);
1976 : : // need to commit stack because we are jumping elsewhere
1977 : 28743 : need_stack_settled(emit);
1978 : 28743 : ASM_JUMP(emit->as, label);
1979 : 28743 : emit_post(emit);
1980 : 28743 : mp_asm_base_suppress_code(&emit->as->base);
1981 : 28743 : }
1982 : :
1983 : 9295 : STATIC void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
1984 : 9295 : vtype_kind_t vtype = peek_vtype(emit, 0);
1985 [ + + ]: 9295 : if (vtype == VTYPE_PYOBJ) {
1986 : 9027 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1987 [ + + ]: 9027 : if (!pop) {
1988 : 231 : adjust_stack(emit, 1);
1989 : : }
1990 : 9027 : emit_call(emit, MP_F_OBJ_IS_TRUE);
1991 : : } else {
1992 : 268 : emit_pre_pop_reg(emit, &vtype, REG_RET);
1993 [ + + ]: 268 : if (!pop) {
1994 : 36 : adjust_stack(emit, 1);
1995 : : }
1996 [ + + ]: 268 : if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
1997 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1998 : : MP_ERROR_TEXT("can't implicitly convert '%q' to 'bool'"), vtype_to_qstr(vtype));
1999 : : }
2000 : : }
2001 : :     // For the non-pop case we need to save the vtype so that
2002 : :     // emit_native_adjust_stack_size can use it. This is a bit of a hack.
2003 [ + + ]: 9295 : if (!pop) {
2004 : 267 : emit->saved_stack_vtype = vtype;
2005 : : }
2006 : : // need to commit stack because we may jump elsewhere
2007 : 9295 : need_stack_settled(emit);
2008 : : // Emit the jump
2009 [ + + ]: 9295 : if (cond) {
2010 [ + + ]: 2367 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2011 : : } else {
2012 [ + + ]: 6928 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2013 : : }
2014 [ + + ]: 9295 : if (!pop) {
2015 : 267 : adjust_stack(emit, -1);
2016 : : }
2017 : 9295 : emit_post(emit);
2018 : 9295 : }
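: : // Hedged pseudo-C summary of the helper above:
: : //
: : //     if (vtype == VTYPE_PYOBJ) {
: : //         ret = mp_obj_is_true(tos);   // full Python truthiness
: : //     } else {
: : //         ret = tos;                   // bool/int/uint: raw value test
: : //     }
: : //     if (cond ? ret != 0 : ret == 0) goto label;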
2019 : :
2020 : 9028 : STATIC void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
2021 : 9028 : DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
2022 : 9028 : emit_native_jump_helper(emit, cond, label, true);
2023 : 9028 : }
2024 : :
2025 : 267 : STATIC void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
2026 : 267 : DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
2027 : 267 : emit_native_jump_helper(emit, cond, label, false);
2028 : 267 : }
2029 : :
2030 : 15291 : STATIC void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
2031 [ + + ]: 15291 : if (except_depth > 0) {
2032 : 276 : exc_stack_entry_t *first_finally = NULL;
2033 : 276 : exc_stack_entry_t *prev_finally = NULL;
2034 : 276 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
2035 [ + + ]: 816 : for (; except_depth > 0; --except_depth, --e) {
2036 [ + + + + ]: 540 : if (e->is_finally && e->is_active) {
2037 : : // Found an active finally handler
2038 [ + + ]: 219 : if (first_finally == NULL) {
2039 : 189 : first_finally = e;
2040 : : }
2041 [ + + ]: 219 : if (prev_finally != NULL) {
2042 : : // Mark prev finally as needed to unwind a jump
2043 : 30 : prev_finally->unwind_label = e->label;
2044 : : }
2045 : : prev_finally = e;
2046 : : }
2047 : : }
2048 [ + + ]: 276 : if (prev_finally == NULL) {
2049 : : // No finally, handle the jump ourselves
2050 : : // First, restore the exception handler address for the jump
2051 [ + - ]: 87 : if (e < emit->exc_stack) {
2052 : 87 : ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
2053 : : } else {
2054 : 0 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
2055 : : }
2056 : 87 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
2057 : : } else {
2058 : : // Last finally should do our jump for us
2059 : : // Mark finally as needing to decide the type of jump
2060 : 189 : prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
2061 : 189 : ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
2062 : 189 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
2063 : : // Cancel any active exception (see also emit_native_pop_except_jump)
2064 : 189 : ASM_MOV_REG_IMM(emit->as, REG_RET, (mp_uint_t)MP_OBJ_NULL);
2065 : 189 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
2066 : : // Jump to the innermost active finally
2067 : 189 : label = first_finally->label;
2068 : : }
2069 : : }
2070 : 15291 : emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
2071 : 15291 : }
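: : // Illustrative source (not from this file) for the machinery above:
: : //
: : //     while cond:
: : //         try:
: : //             break          # must run the finally before leaving
: : //         finally:
: : //             cleanup()
: : //
: : // The break's real target is stashed in LOCAL_IDX_EXC_HANDLER_UNWIND and
: : // the jump is redirected to the innermost active finally; end_finally
: : // then performs the final unwinding jump.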
2072 : :
2073 : 195 : STATIC void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
2074 : : // the context manager is on the top of the stack
2075 : : // stack: (..., ctx_mgr)
2076 : :
2077 : : // get __exit__ method
2078 : 195 : vtype_kind_t vtype;
2079 : 195 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
2080 [ - + ]: 195 : assert(vtype == VTYPE_PYOBJ);
2081 : 195 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2082 : 195 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
2083 : : // stack: (..., ctx_mgr, __exit__, self)
2084 : :
2085 : 195 : emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
2086 : 195 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
2087 : 195 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
2088 : 195 : emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
2089 : 195 : emit_post_push_reg(emit, vtype, REG_ARG_3); // self
2090 : : // stack: (..., __exit__, self)
2091 : : // REG_ARG_1=ctx_mgr
2092 : :
2093 : : // get __enter__ method
2094 : 195 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2095 : 195 : emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
2096 : : // stack: (..., __exit__, self, __enter__, self)
2097 : :
2098 : : // call __enter__ method
2099 : 195 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
2100 : 195 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
2101 : 195 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
2102 : : // stack: (..., __exit__, self, as_value)
2103 : :
2104 : : // need to commit stack because we may jump elsewhere
2105 : 195 : need_stack_settled(emit);
2106 : 195 : emit_native_push_exc_stack(emit, label, true);
2107 : :
2108 : 195 : emit_native_dup_top(emit);
2109 : : // stack: (..., __exit__, self, as_value, as_value)
2110 : 195 : }
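: : // Hedged summary of the stack discipline set up above for
: : // 'with ctx_mgr as x:':
: : //
: : //     __exit__, self = load_method(ctx_mgr, '__exit__')
: : //     as_value = load_method(ctx_mgr, '__enter__')()  # call __enter__
: : //     push_exc_stack(label, is_finally=True)          # protect the body
: : //     # stack: (..., __exit__, self, as_value, as_value); 'as x' pops one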
2111 : :
2112 : 5445 : STATIC void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
2113 [ + + ]: 5445 : if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
2114 : 195 : emit_native_setup_with(emit, label);
2115 : : } else {
2116 : : // Set up except and finally
2117 : 5250 : emit_native_pre(emit);
2118 : 5250 : need_stack_settled(emit);
2119 : 5250 : emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
2120 : 5250 : emit_post(emit);
2121 : : }
2122 : 5445 : }
2123 : :
2124 : 195 : STATIC void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
2125 : : // Note: 3 labels are reserved for this function, starting at *emit->label_slot
2126 : :
2127 : : // stack: (..., __exit__, self, as_value)
2128 : 195 : emit_native_pre(emit);
2129 : 195 : emit_native_leave_exc_stack(emit, false);
2130 : 195 : adjust_stack(emit, -1);
2131 : : // stack: (..., __exit__, self)
2132 : :
2133 : : // Label for case where __exit__ is called from an unwind jump
2134 : 195 : emit_native_label_assign(emit, *emit->label_slot + 2);
2135 : :
2136 : : // call __exit__
2137 : 195 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2138 : 195 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2139 : 195 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2140 : 195 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2141 : 195 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2142 : :
2143 : :     // Replace exc with MP_OBJ_NULL (no exception) and finish
2144 : 195 : emit_native_jump(emit, *emit->label_slot);
2145 : :
2146 : : // nlr_catch
2147 : : // Don't use emit_native_label_assign because this isn't a real finally label
2148 : 195 : mp_asm_base_label_assign(&emit->as->base, label);
2149 : :
2150 : : // Leave with's exception handler
2151 : 195 : emit_native_leave_exc_stack(emit, true);
2152 : :
2153 : : // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
2154 : 195 : emit_native_adjust_stack_size(emit, 2);
2155 : : // stack: (..., __exit__, self)
2156 : :
2157 : 195 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc
2158 : :
2159 : : // Check if exc is MP_OBJ_NULL (i.e. zero) and jump to non-exc handler if it is
2160 : 195 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, *emit->label_slot + 2, false);
2161 : :
2162 : 195 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
2163 : 195 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
2164 : 195 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
2165 : 195 : emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
2166 : : // Stack: (..., __exit__, self, type(exc), exc, traceback)
2167 : :
2168 : : // call __exit__ method
2169 : 195 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2170 : 195 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2171 : : // Stack: (...)
2172 : :
2173 : :     // If REG_RET is true then we need to replace the exception with MP_OBJ_NULL (swallow the exception)
2174 : 195 : if (REG_ARG_1 != REG_RET) {
2175 : 195 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2176 : : }
2177 : 195 : emit_call(emit, MP_F_OBJ_IS_TRUE);
2178 : 195 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
2179 : :
2180 : : // Replace exception with MP_OBJ_NULL.
2181 : 195 : emit_native_label_assign(emit, *emit->label_slot);
2182 : 195 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2183 : 195 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2184 : :
2185 : : // end of with cleanup nlr_catch block
2186 : 195 : emit_native_label_assign(emit, *emit->label_slot + 1);
2187 : :
2188 : : // Exception is in nlr_buf.ret_val slot
2189 : 195 : }
2190 : :
2191 : 5445 : STATIC void emit_native_end_finally(emit_t *emit) {
2192 : : // logic:
2193 : : // exc = pop_stack
2194 : : // if exc == None: pass
2195 : : // else: raise exc
2196 : :     // the check of whether exc is None is done in the MP_F_NATIVE_RAISE stub
2197 : 5445 : emit_native_pre(emit);
2198 : 5445 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
2199 : 5445 : emit_call(emit, MP_F_NATIVE_RAISE);
2200 : :
2201 : : // Get state for this finally and see if we need to unwind
2202 : 5445 : exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
2203 [ + + ]: 5445 : if (e->unwind_label != UNWIND_LABEL_UNUSED) {
2204 : 216 : ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
2205 : 216 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
2206 [ + + ]: 216 : if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
2207 : 186 : ASM_JUMP_REG(emit->as, REG_RET);
2208 : : } else {
2209 : 30 : emit_native_jump(emit, e->unwind_label);
2210 : : }
2211 : 216 : emit_native_label_assign(emit, *emit->label_slot);
2212 : : }
2213 : :
2214 : 5445 : emit_post(emit);
2215 : 5445 : }
2216 : :
2217 : 1792 : STATIC void emit_native_get_iter(emit_t *emit, bool use_stack) {
2218 : :     // perhaps the difficult one: ideally we want to rewrite for loops using native code,
2219 : :     // but when iterating over a Python object we can fall back on the normal runtime calls
2220 : :
2221 : 1792 : vtype_kind_t vtype;
2222 : 1792 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2223 [ - + ]: 1792 : assert(vtype == VTYPE_PYOBJ);
2224 [ + + ]: 1792 : if (use_stack) {
2225 : 1191 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
2226 : 1191 : emit_call(emit, MP_F_NATIVE_GETITER);
2227 : : } else {
2228 : : // mp_getiter will allocate the iter_buf on the heap
2229 : 601 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
2230 : 601 : emit_call(emit, MP_F_NATIVE_GETITER);
2231 : 601 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2232 : : }
2233 : 1792 : }
2234 : :
2235 : 1272 : STATIC void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
2236 : 1272 : emit_native_pre(emit);
2237 : 1272 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
2238 : 1272 : adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
2239 : 1272 : emit_call(emit, MP_F_NATIVE_ITERNEXT);
2240 : : #if MICROPY_DEBUG_MP_OBJ_SENTINELS
2241 : : ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
2242 : : ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
2243 : : #else
2244 : 1272 : MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
2245 : 1272 : ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
2246 : : #endif
2247 : 1272 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2248 : 1272 : }
2249 : :
2250 : 1272 : STATIC void emit_native_for_iter_end(emit_t *emit) {
2251 : : // adjust stack counter (we get here from for_iter ending, which popped the value for us)
2252 : 1272 : emit_native_pre(emit);
2253 : 1272 : adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
2254 : 1272 : emit_post(emit);
2255 : 1272 : }
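: : // Taken together, get_iter/for_iter/for_iter_end behave roughly like the
: : // following (a sketch, assuming mp_getiter/mp_iternext semantics for the
: : // MP_F_NATIVE_* stubs):
: : //
: : //     it = mp_getiter(obj, use_stack ? &stack_iter_buf : NULL);
: : //     loop:
: : //         x = mp_iternext(it);
: : //         if (x == MP_OBJ_STOP_ITERATION) goto done;  // encoded as 0
: : //         /* loop body */
: : //         goto loop;
: : //     done: /* for_iter_end pops the iterator buffer slots */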
2256 : :
2257 : 8814 : STATIC void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
2258 [ + + ]: 8814 : if (within_exc_handler) {
2259 : : // Cancel any active exception so subsequent handlers don't see it
2260 : 4428 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2261 : 4428 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2262 : : } else {
2263 : 4386 : emit_native_leave_exc_stack(emit, false);
2264 : : }
2265 : 8814 : emit_native_jump(emit, label);
2266 : 8814 : }
2267 : :
2268 : 1206 : STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
2269 : 1206 : vtype_kind_t vtype;
2270 : 1206 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
2271 [ + + ]: 1206 : if (vtype == VTYPE_PYOBJ) {
2272 : 1194 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
2273 : 1194 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2274 : : } else {
2275 : 12 : adjust_stack(emit, 1);
2276 : 12 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2277 : : MP_ERROR_TEXT("unary op %q not implemented"), mp_unary_op_method_name[op]);
2278 : : }
2279 : 1206 : }
2280 : :
2281 : 17156 : STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
2282 : 17156 : DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
2283 : 17156 : vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
2284 : 17156 : vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
2285 [ + + ]: 17156 : if ((vtype_lhs == VTYPE_INT || vtype_lhs == VTYPE_UINT)
2286 [ + + ]: 878 : && (vtype_rhs == VTYPE_INT || vtype_rhs == VTYPE_UINT)) {
2287 : : // for integers, inplace and normal ops are equivalent, so use just normal ops
2288 [ + + ]: 874 : if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
2289 : 186 : op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
2290 : : }
2291 : :
2292 : : #if N_X64 || N_X86
2293 : : // special cases for x86 and shifting
2294 [ + + ]: 874 : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2295 : : #if N_X64
2296 : 96 : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
2297 : : #else
2298 : : emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
2299 : : #endif
2300 [ + + ]: 96 : if (op == MP_BINARY_OP_LSHIFT) {
2301 : 48 : ASM_LSL_REG(emit->as, REG_RET);
2302 : : } else {
2303 [ + + ]: 48 : if (vtype_lhs == VTYPE_UINT) {
2304 : 6 : ASM_LSR_REG(emit->as, REG_RET);
2305 : : } else {
2306 : 42 : ASM_ASR_REG(emit->as, REG_RET);
2307 : : }
2308 : : }
2309 : 96 : emit_post_push_reg(emit, vtype_lhs, REG_RET);
2310 : 212 : return;
2311 : : }
2312 : : #endif
2313 : :
2314 : :         // special cases for floor-divide and modulo because we dispatch to helper functions
2315 [ + + ]: 778 : if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
2316 : 20 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
2317 [ + + ]: 20 : if (vtype_lhs != VTYPE_INT) {
2318 : 8 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2319 : : MP_ERROR_TEXT("div/mod not implemented for uint"), mp_binary_op_method_name[op]);
2320 : : }
2321 [ + + ]: 20 : if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
2322 : 10 : emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
2323 : : } else {
2324 : 10 : emit_call(emit, MP_F_SMALL_INT_MODULO);
2325 : : }
2326 : 20 : emit_post_push_reg(emit, VTYPE_INT, REG_RET);
2327 : 20 : return;
2328 : : }
2329 : :
2330 : 758 : int reg_rhs = REG_ARG_3;
2331 : 758 : emit_pre_pop_reg_flexible(emit, &vtype_rhs, ®_rhs, REG_RET, REG_ARG_2);
2332 : 758 : emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
2333 : :
2334 : : #if !(N_X64 || N_X86)
2335 : : if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2336 : : if (op == MP_BINARY_OP_LSHIFT) {
2337 : : ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2338 : : } else {
2339 : : if (vtype_lhs == VTYPE_UINT) {
2340 : : ASM_LSR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2341 : : } else {
2342 : : ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2343 : : }
2344 : : }
2345 : : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2346 : : return;
2347 : : }
2348 : : #endif
2349 : :
2350 [ + + + + + + + + ]: 758 :         if (op == MP_BINARY_OP_OR) {
2351 : 60 : ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2352 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2353 : : } else if (op == MP_BINARY_OP_XOR) {
2354 : 24 : ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2355 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2356 : : } else if (op == MP_BINARY_OP_AND) {
2357 : 60 : ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2358 : 60 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2359 : : } else if (op == MP_BINARY_OP_ADD) {
2360 : 294 : ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2361 : 294 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2362 : : } else if (op == MP_BINARY_OP_SUBTRACT) {
2363 : 24 : ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2364 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2365 : : } else if (op == MP_BINARY_OP_MULTIPLY) {
2366 : 24 : ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2367 : 24 : emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2368 : : } else if (op == MP_BINARY_OP_LESS
2369 : : || op == MP_BINARY_OP_MORE
2370 : : || op == MP_BINARY_OP_EQUAL
2371 : : || op == MP_BINARY_OP_LESS_EQUAL
2372 : : || op == MP_BINARY_OP_MORE_EQUAL
2373 : : || op == MP_BINARY_OP_NOT_EQUAL) {
2374 : : // comparison ops
2375 : :
2376 [ + + ]: 268 : if (vtype_lhs != vtype_rhs) {
2377 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("comparison of int and uint"));
2378 : : }
2379 : :
2380 [ + + ]: 268 : size_t op_idx = op - MP_BINARY_OP_LESS + (vtype_lhs == VTYPE_UINT ? 0 : 6);
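: : // Worked example (annotation, not source): signed '<' gives
: : // op_idx = (MP_BINARY_OP_LESS - MP_BINARY_OP_LESS) + 6 = 6, selecting
: : // the first signed entry (JL on x64) from the 12-entry tables below.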
2381 : :
2382 : 268 : need_reg_single(emit, REG_RET, 0);
2383 : : #if N_X64
2384 : 268 : asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
2385 : 268 : asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
2386 : 268 : static byte ops[6 + 6] = {
2387 : : // unsigned
2388 : : ASM_X64_CC_JB,
2389 : : ASM_X64_CC_JA,
2390 : : ASM_X64_CC_JE,
2391 : : ASM_X64_CC_JBE,
2392 : : ASM_X64_CC_JAE,
2393 : : ASM_X64_CC_JNE,
2394 : : // signed
2395 : : ASM_X64_CC_JL,
2396 : : ASM_X64_CC_JG,
2397 : : ASM_X64_CC_JE,
2398 : : ASM_X64_CC_JLE,
2399 : : ASM_X64_CC_JGE,
2400 : : ASM_X64_CC_JNE,
2401 : : };
2402 : 268 : asm_x64_setcc_r8(emit->as, ops[op_idx], REG_RET);
2403 : : #elif N_X86
2404 : : asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
2405 : : asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
2406 : : static byte ops[6 + 6] = {
2407 : : // unsigned
2408 : : ASM_X86_CC_JB,
2409 : : ASM_X86_CC_JA,
2410 : : ASM_X86_CC_JE,
2411 : : ASM_X86_CC_JBE,
2412 : : ASM_X86_CC_JAE,
2413 : : ASM_X86_CC_JNE,
2414 : : // signed
2415 : : ASM_X86_CC_JL,
2416 : : ASM_X86_CC_JG,
2417 : : ASM_X86_CC_JE,
2418 : : ASM_X86_CC_JLE,
2419 : : ASM_X86_CC_JGE,
2420 : : ASM_X86_CC_JNE,
2421 : : };
2422 : : asm_x86_setcc_r8(emit->as, ops[op_idx], REG_RET);
2423 : : #elif N_THUMB
2424 : : asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
2425 : : if (asm_thumb_allow_armv7m(emit->as)) {
2426 : : static uint16_t ops[6 + 6] = {
2427 : : // unsigned
2428 : : ASM_THUMB_OP_ITE_CC,
2429 : : ASM_THUMB_OP_ITE_HI,
2430 : : ASM_THUMB_OP_ITE_EQ,
2431 : : ASM_THUMB_OP_ITE_LS,
2432 : : ASM_THUMB_OP_ITE_CS,
2433 : : ASM_THUMB_OP_ITE_NE,
2434 : : // signed
2435 : : ASM_THUMB_OP_ITE_LT,
2436 : : ASM_THUMB_OP_ITE_GT,
2437 : : ASM_THUMB_OP_ITE_EQ,
2438 : : ASM_THUMB_OP_ITE_LE,
2439 : : ASM_THUMB_OP_ITE_GE,
2440 : : ASM_THUMB_OP_ITE_NE,
2441 : : };
2442 : : asm_thumb_op16(emit->as, ops[op_idx]);
2443 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2444 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2445 : : } else {
2446 : : static uint16_t ops[6 + 6] = {
2447 : : // unsigned
2448 : : ASM_THUMB_CC_CC,
2449 : : ASM_THUMB_CC_HI,
2450 : : ASM_THUMB_CC_EQ,
2451 : : ASM_THUMB_CC_LS,
2452 : : ASM_THUMB_CC_CS,
2453 : : ASM_THUMB_CC_NE,
2454 : : // signed
2455 : : ASM_THUMB_CC_LT,
2456 : : ASM_THUMB_CC_GT,
2457 : : ASM_THUMB_CC_EQ,
2458 : : ASM_THUMB_CC_LE,
2459 : : ASM_THUMB_CC_GE,
2460 : : ASM_THUMB_CC_NE,
2461 : : };
2462 : : asm_thumb_bcc_rel9(emit->as, ops[op_idx], 6);
2463 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2464 : : asm_thumb_b_rel12(emit->as, 4);
2465 : : asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2466 : : }
2467 : : #elif N_ARM
2468 : : asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
2469 : : static uint ccs[6 + 6] = {
2470 : : // unsigned
2471 : : ASM_ARM_CC_CC,
2472 : : ASM_ARM_CC_HI,
2473 : : ASM_ARM_CC_EQ,
2474 : : ASM_ARM_CC_LS,
2475 : : ASM_ARM_CC_CS,
2476 : : ASM_ARM_CC_NE,
2477 : : // signed
2478 : : ASM_ARM_CC_LT,
2479 : : ASM_ARM_CC_GT,
2480 : : ASM_ARM_CC_EQ,
2481 : : ASM_ARM_CC_LE,
2482 : : ASM_ARM_CC_GE,
2483 : : ASM_ARM_CC_NE,
2484 : : };
2485 : : asm_arm_setcc_reg(emit->as, REG_RET, ccs[op_idx]);
2486 : : #elif N_XTENSA || N_XTENSAWIN
2487 : : static uint8_t ccs[6 + 6] = {
2488 : : // unsigned
2489 : : ASM_XTENSA_CC_LTU,
2490 : : 0x80 | ASM_XTENSA_CC_LTU, // for GTU we'll swap args
2491 : : ASM_XTENSA_CC_EQ,
2492 : : 0x80 | ASM_XTENSA_CC_GEU, // for LEU we'll swap args
2493 : : ASM_XTENSA_CC_GEU,
2494 : : ASM_XTENSA_CC_NE,
2495 : : // signed
2496 : : ASM_XTENSA_CC_LT,
2497 : : 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
2498 : : ASM_XTENSA_CC_EQ,
2499 : : 0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
2500 : : ASM_XTENSA_CC_GE,
2501 : : ASM_XTENSA_CC_NE,
2502 : : };
2503 : : uint8_t cc = ccs[op_idx];
2504 : : if ((cc & 0x80) == 0) {
2505 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
2506 : : } else {
2507 : : asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
2508 : : }
2509 : : #else
2510 : : #error not implemented
2511 : : #endif
2512 : 268 : emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
2513 : : } else {
2514 : : // TODO other ops not yet implemented
2515 : 4 : adjust_stack(emit, 1);
2516 : 758 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2517 : : MP_ERROR_TEXT("binary op %q not implemented"), mp_binary_op_method_name[op]);
2518 : : }
2519 [ + + + - ]: 32560 : } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
2520 : 16278 : emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
2521 : 16278 : bool invert = false;
2522 [ + + ]: 16278 : if (op == MP_BINARY_OP_NOT_IN) {
2523 : : invert = true;
2524 : : op = MP_BINARY_OP_IN;
2525 [ + + ]: 16143 : } else if (op == MP_BINARY_OP_IS_NOT) {
2526 : 483 : invert = true;
2527 : 483 : op = MP_BINARY_OP_IS;
2528 : : }
2529 : 16278 : emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
2530 [ + + ]: 16278 : if (invert) {
2531 : 618 : ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
2532 : 618 : emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
2533 : : }
2534 : 16278 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2535 : : } else {
2536 : 4 : adjust_stack(emit, -1);
2537 : 17040 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2538 : : MP_ERROR_TEXT("can't do binary op between '%q' and '%q'"),
2539 : : vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
2540 : : }
2541 : : }
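// All of the per-architecture comparison tables above share one layout: six
// unsigned condition codes followed by six signed ones, in the same order as
// the contiguous run MP_BINARY_OP_LESS..MP_BINARY_OP_NOT_EQUAL. The indexing
// contract, restated (mirrors the op_idx computation above):
//
//     // ops[0..5]  : unsigned  <   >   ==  <=  >=  !=
//     // ops[6..11] : signed    <   >   ==  <=  >=  !=
//     // op_idx = (op - MP_BINARY_OP_LESS) + (vtype_lhs == VTYPE_UINT ? 0 : 6);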
2542 : :
2543 : : #if MICROPY_PY_BUILTINS_SLICE
2544 : : STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
2545 : : #endif
2546 : :
2547 : 6885 : STATIC void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
2548 : : // for viper: call runtime, with types of args
2549 : :     // if wrapped in byte_array or similar, allocates memory and fills it
2550 : 6885 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
2551 : 6885 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
2552 : 6885 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
2553 : 6885 : MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
2554 : : #if MICROPY_PY_BUILTINS_SLICE
2555 [ + + ]: 6885 : if (kind == MP_EMIT_BUILD_SLICE) {
2556 : 885 : emit_native_build_slice(emit, n_args);
2557 : 885 : return;
2558 : : }
2559 : : #endif
2560 : 6000 : emit_native_pre(emit);
2561 [ + + ]: 6000 : if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
2562 : 4971 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2563 : : }
2564 : 6000 : emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
2565 : 6000 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
2566 : : }
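// The four MP_STATIC_ASSERTs in emit_native_build encode a parallel-enum
// trick: because the MP_EMIT_BUILD_* kinds and the MP_F_BUILD_* function
// indices are declared in the same order, `MP_F_BUILD_TUPLE + kind` selects
// the right runtime helper with no switch. The same pattern in isolation
// (hypothetical names, for illustration):
//
//     enum { KIND_TUPLE, KIND_LIST, KIND_MAP, KIND_SET };
//     enum { FUN_BUILD_TUPLE = 10, FUN_BUILD_LIST, FUN_BUILD_MAP, FUN_BUILD_SET };
//     MP_STATIC_ASSERT(FUN_BUILD_TUPLE + KIND_LIST == FUN_BUILD_LIST);
//     // dispatch: fun_table[FUN_BUILD_TUPLE + kind]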
2567 : :
2568 : 1884 : STATIC void emit_native_store_map(emit_t *emit) {
2569 : 1884 : vtype_kind_t vtype_key, vtype_value, vtype_map;
2570 : 1884 : emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
2571 [ - + ]: 1884 : assert(vtype_key == VTYPE_PYOBJ);
2572 [ - + ]: 1884 : assert(vtype_value == VTYPE_PYOBJ);
2573 [ - + ]: 1884 : assert(vtype_map == VTYPE_PYOBJ);
2574 : 1884 : emit_call(emit, MP_F_STORE_MAP);
2575 : 1884 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
2576 : 1884 : }
2577 : :
2578 : : #if MICROPY_PY_BUILTINS_SLICE
2579 : 885 : STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
2580 : 885 : DEBUG_printf("build_slice %d\n", n_args);
2581 [ + + ]: 885 : if (n_args == 2) {
2582 : 714 : vtype_kind_t vtype_start, vtype_stop;
2583 : 714 : emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
2584 [ - + ]: 714 : assert(vtype_start == VTYPE_PYOBJ);
2585 [ - + ]: 714 : assert(vtype_stop == VTYPE_PYOBJ);
2586 : 714 : emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
2587 : : } else {
2588 [ - + ]: 171 : assert(n_args == 3);
2589 : 171 : vtype_kind_t vtype_start, vtype_stop, vtype_step;
2590 : 171 : emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
2591 [ - + ]: 171 : assert(vtype_start == VTYPE_PYOBJ);
2592 [ - + ]: 171 : assert(vtype_stop == VTYPE_PYOBJ);
2593 [ - + ]: 171 : assert(vtype_step == VTYPE_PYOBJ);
2594 : : }
2595 : 885 : emit_call(emit, MP_F_NEW_SLICE);
2596 : 885 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2597 : 885 : }
2598 : : #endif
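// Concretely, `a[1:5]` reaches build_slice with n_args == 2 and the missing
// step is filled with the None object, while `a[1:5:2]` arrives with
// n_args == 3; both paths end in a single MP_F_NEW_SLICE call. The runtime
// effect is roughly equivalent to (illustrative, assuming the usual slice
// constructor):
//
//     mp_obj_t sl = mp_obj_new_slice(start, stop, mp_const_none); // a[start:stop]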
2599 : :
2600 : 177 : STATIC void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
2601 : 177 : mp_fun_kind_t f;
2602 [ + + ]: 177 : if (kind == SCOPE_LIST_COMP) {
2603 : 150 : vtype_kind_t vtype_item;
2604 : 150 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2605 [ - + ]: 150 : assert(vtype_item == VTYPE_PYOBJ);
2606 : 150 : f = MP_F_LIST_APPEND;
2607 : : #if MICROPY_PY_BUILTINS_SET
2608 [ + + ]: 27 : } else if (kind == SCOPE_SET_COMP) {
2609 : 3 : vtype_kind_t vtype_item;
2610 : 3 : emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2611 [ - + ]: 3 : assert(vtype_item == VTYPE_PYOBJ);
2612 : 3 : f = MP_F_STORE_SET;
2613 : : #endif
2614 : : } else {
2615 : : // SCOPE_DICT_COMP
2616 : 24 : vtype_kind_t vtype_key, vtype_value;
2617 : 24 : emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2618 [ - + ]: 24 : assert(vtype_key == VTYPE_PYOBJ);
2619 [ - + ]: 24 : assert(vtype_value == VTYPE_PYOBJ);
2620 : 24 : f = MP_F_STORE_MAP;
2621 : : }
2622 : 177 : vtype_kind_t vtype_collection;
2623 : 177 : emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2624 [ - + ]: 177 : assert(vtype_collection == VTYPE_PYOBJ);
2625 : 177 : emit_call(emit, f);
2626 : 177 : emit_post(emit);
2627 : 177 : }
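// store_comp leaves the collection being built buried collection_index slots
// down the stack and appends each produced item in place. The three runtime
// calls selected above amount to (informal summary):
//
//     // list comp: append item to the list        (MP_F_LIST_APPEND)
//     // set comp:  add item to the set            (MP_F_STORE_SET)
//     // dict comp: store key/value into the dict  (MP_F_STORE_MAP)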
2628 : :
2629 : 363 : STATIC void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
2630 : 363 : DEBUG_printf("unpack_sequence %d\n", n_args);
2631 : 363 : vtype_kind_t vtype_base;
2632 : 363 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2633 [ - + ]: 363 : assert(vtype_base == VTYPE_PYOBJ);
2634 : 363 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
2635 : 363 : emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
2636 : 363 : }
2637 : :
2638 : 99 : STATIC void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
2639 : 99 : DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
2640 : 99 : vtype_kind_t vtype_base;
2641 : 99 : emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2642 [ - + ]: 99 : assert(vtype_base == VTYPE_PYOBJ);
2643 : 99 : emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
2644 : 99 :     emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
2645 : 99 : }
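// Packing both counts into one immediate keeps the runtime call down to a
// single extra register argument; the unpack-ex helper splits them again.
// Sketch of the encode/decode pair:
//
//     mp_uint_t packed = n_left | (n_right << 8); // n_left in bits 0-7
//     mp_uint_t left = packed & 0xff;             // items before the *star
//     mp_uint_t right = packed >> 8;              // items after the *star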
2646 : :
2647 : 7569 : STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2648 : : // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
2649 : 7569 : emit_native_pre(emit);
2650 : 7569 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2651 : 7569 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2652 [ + + ]: 7569 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2653 : 7191 : need_reg_all(emit);
2654 : 7191 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2655 : : } else {
2656 : 378 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2);
2657 : 378 : need_reg_all(emit);
2658 : : }
2659 : 7569 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2660 : 7569 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);
2661 : 7569 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2662 : 7569 : }
2663 : :
2664 : 192 : STATIC void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2665 : : // make function
2666 : 192 : emit_native_pre(emit);
2667 : 192 : emit_native_mov_reg_state(emit, REG_ARG_2, LOCAL_IDX_FUN_OBJ(emit));
2668 : 192 : ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_2, OFFSETOF_OBJ_FUN_BC_CONTEXT);
2669 [ + + ]: 192 : if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2670 : 189 : need_reg_all(emit);
2671 : 189 : ASM_MOV_REG_IMM(emit->as, REG_ARG_3, 0);
2672 : : } else {
2673 : 3 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_closed_over);
2674 : 3 : adjust_stack(emit, 2 + n_closed_over);
2675 : 3 : need_reg_all(emit);
2676 : : }
2677 : 192 : emit_load_reg_with_child(emit, REG_ARG_1, scope->raw_code);
2678 : 192 : ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);
2679 : :
2680 : : // make closure
2681 : : #if REG_ARG_1 != REG_RET
2682 : 192 : ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2683 : : #endif
2684 : 192 : ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
2685 : 192 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2686 [ + + ]: 192 : if (n_pos_defaults != 0 || n_kw_defaults != 0) {
2687 : 3 : adjust_stack(emit, -2);
2688 : : }
2689 : 192 : ASM_CALL_IND(emit->as, MP_F_NEW_CLOSURE);
2690 : 192 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2691 : 192 : }
2692 : :
2693 : 41282 : STATIC void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2694 : 41282 : DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);
2695 : :
2696 : : // TODO: in viper mode, call special runtime routine with type info for args,
2697 : : // and wanted type info for return, to remove need for boxing/unboxing
2698 : :
2699 : 41282 : emit_native_pre(emit);
2700 : 41282 : vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2701 [ + + ]: 41282 : if (vtype_fun == VTYPE_BUILTIN_CAST) {
2702 : : // casting operator
2703 [ - + ]: 96 : assert(n_positional == 1 && n_keyword == 0);
2704 [ - + ]: 96 : assert(!star_flags);
2705 : 96 : DEBUG_printf(" cast to %d\n", vtype_fun);
2706 : 96 : vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
2707 [ + + + ]: 96 : switch (peek_vtype(emit, 0)) {
2708 : 78 : case VTYPE_PYOBJ: {
2709 : 78 : vtype_kind_t vtype;
2710 : 78 : emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2711 : 78 : emit_pre_pop_discard(emit);
2712 : 78 : emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
2713 : 78 : emit_post_push_reg(emit, vtype_cast, REG_RET);
2714 : 78 : break;
2715 : : }
2716 : 14 : case VTYPE_BOOL:
2717 : : case VTYPE_INT:
2718 : : case VTYPE_UINT:
2719 : : case VTYPE_PTR:
2720 : : case VTYPE_PTR8:
2721 : : case VTYPE_PTR16:
2722 : : case VTYPE_PTR32:
2723 : : case VTYPE_PTR_NONE:
2724 : 14 : emit_fold_stack_top(emit, REG_ARG_1);
2725 : 14 : emit_post_top_set_vtype(emit, vtype_cast);
2726 : 14 : break;
2727 : : default:
2728 : : // this can happen when casting a cast: int(int)
2729 : 4 : mp_raise_NotImplementedError(MP_ERROR_TEXT("casting"));
2730 : : }
2731 : : } else {
2732 [ - + ]: 41186 : assert(vtype_fun == VTYPE_PYOBJ);
2733 [ + + ]: 41186 : if (star_flags) {
2734 : 336 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 2); // pointer to args
2735 : 336 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2736 : 336 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2737 : : } else {
2738 [ + + ]: 40850 : if (n_positional != 0 || n_keyword != 0) {
2739 : 37631 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
2740 : : }
2741 : 40846 : emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
2742 : 40846 : emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
2743 : 40846 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2744 : : }
2745 : : }
2746 : 41274 : }
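// The VTYPE_BUILTIN_CAST branch above is what makes `int(x)` or `ptr8(buf)`
// in viper source a type cast rather than a real call: a Python object is
// unboxed through MP_F_CONVERT_OBJ_TO_NATIVE, while a value that is already
// native costs no generated code at all; only its compile-time vtype tag is
// rewritten via emit_post_top_set_vtype.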
2747 : :
2748 : 13449 : STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2749 [ + + ]: 13449 : if (star_flags) {
2750 : 108 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
2751 : 108 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2752 : 108 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2753 : : } else {
2754 : 13341 : emit_native_pre(emit);
2755 : 13341 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
2756 : 13341 : emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2757 : 13341 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2758 : : }
2759 : 13449 : }
2760 : :
2761 : 14991 : STATIC void emit_native_return_value(emit_t *emit) {
2762 : 14991 : DEBUG_printf("return_value\n");
2763 : :
2764 [ + + ]: 14991 : if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
2765 : : // Save pointer to current stack position for caller to access return value
2766 : 858 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2767 : 858 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2768 : :
2769 : : // Put return type in return value slot
2770 : 858 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
2771 : 858 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
2772 : :
2773 : : // Do the unwinding jump to get to the return handler
2774 : 858 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2775 : 858 : return;
2776 : : }
2777 : :
2778 [ + + ]: 14133 : if (emit->do_viper_types) {
2779 : 996 : vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
2780 [ + + ]: 996 : if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2781 : 698 : emit_pre_pop_discard(emit);
2782 [ + + ]: 698 : if (return_vtype == VTYPE_PYOBJ) {
2783 : 508 : emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
2784 : : } else {
2785 : 190 : ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
2786 : : }
2787 : : } else {
2788 : 298 : vtype_kind_t vtype;
2789 [ + + ]: 476 : emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
2790 [ + + ]: 298 : if (vtype != return_vtype) {
2791 : 298 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2792 : : MP_ERROR_TEXT("return expected '%q' but got '%q'"),
2793 : : vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
2794 : : }
2795 : : }
2796 [ + + ]: 996 : if (return_vtype != VTYPE_PYOBJ) {
2797 : 368 : emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
2798 : : #if REG_RET != REG_PARENT_RET
2799 : : ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
2800 : : #endif
2801 : : }
2802 : : } else {
2803 : 13137 : vtype_kind_t vtype;
2804 : 13137 : emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
2805 [ - + ]: 13137 : assert(vtype == VTYPE_PYOBJ);
2806 : : }
2807 [ + + + + ]: 14133 : if (NEED_GLOBAL_EXC_HANDLER(emit)) {
2808 : : // Save return value for the global exception handler to use
2809 : 9696 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
2810 : : }
2811 : 14133 : emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2812 : : }
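// Note the boundary conversion above: even when a viper function declares a
// native return type, the value is boxed back into an mp_obj_t before
// returning, so ordinary Python callers always receive an object. Roughly
// (assumed runtime helper name, for illustration):
//
//     mp_obj_t result = mp_native_to_obj(ret_word, return_vtype);
//     // e.g. VTYPE_INT -> small-int object, VTYPE_BOOL -> True/False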
2813 : :
2814 : 2752 : STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
2815 : 2752 : (void)n_args;
2816 [ - + ]: 2752 : assert(n_args == 1);
2817 : 2752 : vtype_kind_t vtype_exc;
2818 : 2752 : emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
2819 [ + + ]: 2752 : if (vtype_exc != VTYPE_PYOBJ) {
2820 : 4 : EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("must raise an object"));
2821 : : }
2822 : : // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
2823 : 2752 : emit_call(emit, MP_F_NATIVE_RAISE);
2824 : 2752 : mp_asm_base_suppress_code(&emit->as->base);
2825 : 2752 : }
2826 : :
2827 : 941 : STATIC void emit_native_yield(emit_t *emit, int kind) {
2828 : : // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot
2829 : :
2830 [ + + ]: 941 : if (emit->do_viper_types) {
2831 : 8 : mp_raise_NotImplementedError(MP_ERROR_TEXT("native yield"));
2832 : : }
2833 : 933 : emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;
2834 : :
2835 : 933 : need_stack_settled(emit);
2836 : :
2837 [ + + ]: 933 : if (kind == MP_EMIT_YIELD_FROM) {
2838 : :
2839 : : // Top of yield-from loop, conceptually implementing:
2840 : : // for item in generator:
2841 : : // yield item
2842 : :
2843 : : // Jump to start of loop
2844 : 516 : emit_native_jump(emit, *emit->label_slot + 2);
2845 : :
2846 : : // Label for top of loop
2847 : 516 : emit_native_label_assign(emit, *emit->label_slot + 1);
2848 : : }
2849 : :
2850 : : // Save pointer to current stack position for caller to access yielded value
2851 : 933 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2852 : 933 : emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2853 : :
2854 : : // Put return type in return value slot
2855 : 933 : ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
2856 : 933 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
2857 : :
2858 : : // Save re-entry PC
2859 : 933 : ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
2860 : 933 : emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);
2861 : :
2862 : : // Jump to exit handler
2863 : 933 : ASM_JUMP(emit->as, emit->exit_label);
2864 : :
2865 : : // Label re-entry point
2866 : 933 : mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);
2867 : :
2868 : : // Re-open any active exception handler
2869 [ + + ]: 933 : if (emit->exc_stack_size > 0) {
2870 : : // Find innermost active exception handler, to restore as current handler
2871 : 204 : exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
2872 [ + + ]: 216 : for (; e >= emit->exc_stack; --e) {
2873 [ + + ]: 210 : if (e->is_active) {
2874 : : // Found active handler, get its PC
2875 : 198 : ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
2876 : 198 : ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
2877 : 198 : break;
2878 : : }
2879 : : }
2880 : : }
2881 : :
2882 : 933 : emit_native_adjust_stack_size(emit, 1); // send_value
2883 : :
2884 [ + + ]: 933 : if (kind == MP_EMIT_YIELD_VALUE) {
2885 : : // Check LOCAL_IDX_EXC_VAL for any injected value
2886 : 417 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
2887 : 417 : emit_call(emit, MP_F_NATIVE_RAISE);
2888 : : } else {
2889 : : // Label loop entry
2890 : 516 : emit_native_label_assign(emit, *emit->label_slot + 2);
2891 : :
2892 : : // Get the next item from the delegate generator
2893 : 516 : vtype_kind_t vtype;
2894 : 516 : emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
2895 : 516 : emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
2896 : 516 : ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_EXC_VAL(emit)); // throw_value
2897 : 516 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
2898 : 516 : emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
2899 : 516 : emit_call(emit, MP_F_NATIVE_YIELD_FROM);
2900 : :
2901 : : // If returned non-zero then generator continues
2902 : 516 : ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
2903 : :
2904 : : // Pop exhausted gen, replace with ret_value
2905 : 516 : emit_native_adjust_stack_size(emit, 1); // ret_value
2906 : 516 : emit_fold_stack_top(emit, REG_ARG_1);
2907 : : }
2908 : 933 : }
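// Shape of the code generated for `yield from`, using the three reserved
// labels (a control-flow sketch, not literal emitter output):
//
//     goto entry;        // label_slot+2
//   top:                 // label_slot+1: yield the item just produced
//     save SP and re-entry PC (label_slot+0), set MP_VM_RETURN_YIELD,
//     jump to the exit handler
//   resume:              // label_slot+0: the next send()/throw() lands here
//   entry:               // label_slot+2
//     if (mp_native_yield_from(gen, send_value, &ret_value)) goto top;
//     // delegate exhausted: ret_value replaces it on the Python stack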
2909 : :
2910 : 4386 : STATIC void emit_native_start_except_handler(emit_t *emit) {
2911 : : // Protected block has finished so leave the current exception handler
2912 : 4386 : emit_native_leave_exc_stack(emit, true);
2913 : :
2914 : : // Get and push nlr_buf.ret_val
2915 : 4386 : ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
2916 : 4386 : emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
2917 : 4386 : }
2918 : :
2919 : 4386 : STATIC void emit_native_end_except_handler(emit_t *emit) {
2920 : 4386 : adjust_stack(emit, -1); // pop the exception (end_finally didn't use it)
2921 : 4386 : }
2922 : :
2923 : : const emit_method_table_t EXPORT_FUN(method_table) = {
2924 : : #if MICROPY_DYNAMIC_COMPILER
2925 : : EXPORT_FUN(new),
2926 : : EXPORT_FUN(free),
2927 : : #endif
2928 : :
2929 : : emit_native_start_pass,
2930 : : emit_native_end_pass,
2931 : : emit_native_adjust_stack_size,
2932 : : emit_native_set_source_line,
2933 : :
2934 : : {
2935 : : emit_native_load_local,
2936 : : emit_native_load_global,
2937 : : },
2938 : : {
2939 : : emit_native_store_local,
2940 : : emit_native_store_global,
2941 : : },
2942 : : {
2943 : : emit_native_delete_local,
2944 : : emit_native_delete_global,
2945 : : },
2946 : :
2947 : : emit_native_label_assign,
2948 : : emit_native_import,
2949 : : emit_native_load_const_tok,
2950 : : emit_native_load_const_small_int,
2951 : : emit_native_load_const_str,
2952 : : emit_native_load_const_obj,
2953 : : emit_native_load_null,
2954 : : emit_native_load_method,
2955 : : emit_native_load_build_class,
2956 : : emit_native_subscr,
2957 : : emit_native_attr,
2958 : : emit_native_dup_top,
2959 : : emit_native_dup_top_two,
2960 : : emit_native_pop_top,
2961 : : emit_native_rot_two,
2962 : : emit_native_rot_three,
2963 : : emit_native_jump,
2964 : : emit_native_pop_jump_if,
2965 : : emit_native_jump_if_or_pop,
2966 : : emit_native_unwind_jump,
2967 : : emit_native_setup_block,
2968 : : emit_native_with_cleanup,
2969 : : emit_native_end_finally,
2970 : : emit_native_get_iter,
2971 : : emit_native_for_iter,
2972 : : emit_native_for_iter_end,
2973 : : emit_native_pop_except_jump,
2974 : : emit_native_unary_op,
2975 : : emit_native_binary_op,
2976 : : emit_native_build,
2977 : : emit_native_store_map,
2978 : : emit_native_store_comp,
2979 : : emit_native_unpack_sequence,
2980 : : emit_native_unpack_ex,
2981 : : emit_native_make_function,
2982 : : emit_native_make_closure,
2983 : : emit_native_call_function,
2984 : : emit_native_call_method,
2985 : : emit_native_return_value,
2986 : : emit_native_raise_varargs,
2987 : : emit_native_yield,
2988 : :
2989 : : emit_native_start_except_handler,
2990 : : emit_native_end_except_handler,
2991 : : };
2992 : :
2993 : : #endif
|