@@ -174,7 +174,7 @@ static int get_arg_label(emit_inline_asm_t *emit, const char *op, mp_parse_node_
 #define RRI8_B (2)

 typedef struct _opcode_table_3arg_t {
-    uint16_t name; // actually a qstr, which should fit in 16 bits
+    qstr_short_t name;
     uint8_t type;
     uint8_t a0 : 4;
     uint8_t a1 : 4;
@@ -188,6 +188,13 @@ static const opcode_table_3arg_t opcode_table_3arg[] = {
     {MP_QSTR_add, RRR, 0, 8},
     {MP_QSTR_sub, RRR, 0, 12},
     {MP_QSTR_mull, RRR, 2, 8},
+    {MP_QSTR_addx2, RRR, 0, 9},
+    {MP_QSTR_addx4, RRR, 0, 10},
+    {MP_QSTR_addx8, RRR, 0, 11},
+    {MP_QSTR_subx2, RRR, 0, 13},
+    {MP_QSTR_subx4, RRR, 0, 14},
+    {MP_QSTR_subx8, RRR, 0, 15},
+    {MP_QSTR_src, RRR, 1, 8},

     // load/store/addi opcodes: reg, reg, imm
     // upper nibble of type encodes the range of the immediate arg
@@ -209,21 +216,58 @@ static const opcode_table_3arg_t opcode_table_3arg[] = {
     {MP_QSTR_bge, RRI8_B, ASM_XTENSA_CC_GE, 0},
     {MP_QSTR_bgeu, RRI8_B, ASM_XTENSA_CC_GEU, 0},
     {MP_QSTR_blt, RRI8_B, ASM_XTENSA_CC_LT, 0},
+    {MP_QSTR_bltu, RRI8_B, ASM_XTENSA_CC_LTU, 0},
     {MP_QSTR_bnall, RRI8_B, ASM_XTENSA_CC_NALL, 0},
     {MP_QSTR_bne, RRI8_B, ASM_XTENSA_CC_NE, 0},
     {MP_QSTR_bnone, RRI8_B, ASM_XTENSA_CC_NONE, 0},
 };

+// The index of the qstrs matches the CCZ condition value to be embedded into the opcode.
+static const qstr_short_t BCCZ_OPCODES[] = { MP_QSTR_beqz, MP_QSTR_bnez, MP_QSTR_bltz, MP_QSTR_bgez };
+
+#if MICROPY_EMIT_INLINE_XTENSA_UNCOMMON_OPCODES
+typedef struct _single_opcode_t {
+    qstr_short_t name;
+    uint16_t value;
+} single_opcode_t;
+
+static const single_opcode_t NOARGS_OPCODES[] = {
+    {MP_QSTR_dsync, 0x2030},
+    {MP_QSTR_esync, 0x2020},
+    {MP_QSTR_extw, 0x20D0},
+    {MP_QSTR_ill, 0x0000},
+    {MP_QSTR_isync, 0x2000},
+    {MP_QSTR_memw, 0x20C0},
+    {MP_QSTR_rsync, 0x2010},
+};
+#endif
+
 static void emit_inline_xtensa_op(emit_inline_asm_t *emit, qstr op, mp_uint_t n_args, mp_parse_node_t *pn_args) {
     size_t op_len;
     const char *op_str = (const char *)qstr_data(op, &op_len);

     if (n_args == 0) {
-        if (op == MP_QSTR_ret_n) {
+        if (op == MP_QSTR_ret_n || op == MP_QSTR_ret) {
             asm_xtensa_op_ret_n(&emit->as);
-        } else {
-            goto unknown_op;
+            return;
+        } else if (op == MP_QSTR_nop) {
+            asm_xtensa_op24(&emit->as, 0x20F0);
+            return;
+        } else if (op == MP_QSTR_nop_n) {
+            asm_xtensa_op16(&emit->as, 0xF03D);
+            return;
         }
+        #if MICROPY_EMIT_INLINE_XTENSA_UNCOMMON_OPCODES
+        for (size_t index = 0; index < MP_ARRAY_SIZE(NOARGS_OPCODES); index++) {
+            const single_opcode_t *opcode = &NOARGS_OPCODES[index];
+            if (op == opcode->name) {
+                asm_xtensa_op24(&emit->as, opcode->value);
+                return;
+            }
+        }
+        #endif
+
+        goto unknown_op;

     } else if (n_args == 1) {
         if (op == MP_QSTR_callx0) {
@@ -235,17 +279,49 @@ static void emit_inline_xtensa_op(emit_inline_asm_t *emit, qstr op, mp_uint_t n_
         } else if (op == MP_QSTR_jx) {
             uint r0 = get_arg_reg(emit, op_str, pn_args[0]);
             asm_xtensa_op_jx(&emit->as, r0);
+        } else if (op == MP_QSTR_ssl) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            asm_xtensa_op_ssl(&emit->as, r0);
+        } else if (op == MP_QSTR_ssr) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            asm_xtensa_op_ssr(&emit->as, r0);
+        } else if (op == MP_QSTR_ssai) {
+            mp_uint_t sa = get_arg_i(emit, op_str, pn_args[0], 0, 31);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 4, 4, sa & 0x0F, (sa >> 4) & 0x01));
+        } else if (op == MP_QSTR_ssa8b) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 4, 3, r0, 0));
+        } else if (op == MP_QSTR_ssa8l) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 4, 2, r0, 0));
+        } else if (op == MP_QSTR_call0) {
+            mp_uint_t label = get_arg_label(emit, op_str, pn_args[0]);
+            asm_xtensa_call0(&emit->as, label);
+        #if MICROPY_EMIT_INLINE_XTENSA_UNCOMMON_OPCODES
+        } else if (op == MP_QSTR_fsync) {
+            mp_uint_t imm3 = get_arg_i(emit, op_str, pn_args[0], 0, 7);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 0, 2, 8 | imm3, 0));
+        } else if (op == MP_QSTR_ill_n) {
+            asm_xtensa_op16(&emit->as, 0xF06D);
+        #endif
         } else {
             goto unknown_op;
         }

     } else if (n_args == 2) {
         uint r0 = get_arg_reg(emit, op_str, pn_args[0]);
-        if (op == MP_QSTR_beqz) {
-            int label = get_arg_label(emit, op_str, pn_args[1]);
+        for (size_t index = 0; index < MP_ARRAY_SIZE(BCCZ_OPCODES); index++) {
+            if (op == BCCZ_OPCODES[index]) {
+                mp_uint_t label = get_arg_label(emit, op_str, pn_args[1]);
+                asm_xtensa_bccz_reg_label(&emit->as, index, r0, label);
+                return;
+            }
+        }
+        if (op == MP_QSTR_beqz_n) {
+            mp_uint_t label = get_arg_label(emit, op_str, pn_args[1]);
             asm_xtensa_bccz_reg_label(&emit->as, ASM_XTENSA_CCZ_EQ, r0, label);
-        } else if (op == MP_QSTR_bnez) {
-            int label = get_arg_label(emit, op_str, pn_args[1]);
+        } else if (op == MP_QSTR_bnez_n) {
+            mp_uint_t label = get_arg_label(emit, op_str, pn_args[1]);
             asm_xtensa_bccz_reg_label(&emit->as, ASM_XTENSA_CCZ_NE, r0, label);
         } else if (op == MP_QSTR_mov || op == MP_QSTR_mov_n) {
             // we emit mov.n for both "mov" and "mov_n" opcodes
@@ -255,7 +331,47 @@ static void emit_inline_xtensa_op(emit_inline_asm_t *emit, qstr op, mp_uint_t n_
             // for convenience we emit l32r if the integer doesn't fit in movi
             uint32_t imm = get_arg_i(emit, op_str, pn_args[1], 0, 0);
             asm_xtensa_mov_reg_i32(&emit->as, r0, imm);
-        } else {
+        } else if (op == MP_QSTR_abs_) {
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 6, r0, 1, r1));
+        } else if (op == MP_QSTR_neg) {
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 0, 6, r0, 0, r1));
+        } else if (op == MP_QSTR_sll) {
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 10, r0, r1, 0));
+        } else if (op == MP_QSTR_sra) {
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 11, r0, 0, r1));
+        } else if (op == MP_QSTR_srl) {
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 9, r0, 0, r1));
+        } else if (op == MP_QSTR_l32r) {
+            mp_uint_t label = get_arg_label(emit, op_str, pn_args[1]);
+            asm_xtensa_l32r(&emit->as, r0, label);
+        } else if (op == MP_QSTR_movi_n) {
+            mp_int_t imm = get_arg_i(emit, op_str, pn_args[1], -32, 95);
+            asm_xtensa_op_movi_n(&emit->as, r0, imm);
+        } else
+        #if MICROPY_EMIT_INLINE_XTENSA_UNCOMMON_OPCODES
+        if (op == MP_QSTR_rsr) {
+            mp_uint_t sr = get_arg_i(emit, op_str, pn_args[1], 0, 255);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RSR(0, 3, 0, sr, r0));
+        } else if (op == MP_QSTR_rur) {
+            mp_uint_t imm8 = get_arg_i(emit, op_str, pn_args[1], 0, 255);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 3, 14, r0, (imm8 >> 4) & 0x0F, imm8 & 0x0F));
+        } else if (op == MP_QSTR_wsr) {
+            mp_uint_t sr = get_arg_i(emit, op_str, pn_args[1], 0, 255);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RSR(0, 3, 1, sr, r0));
+        } else if (op == MP_QSTR_wur) {
+            mp_uint_t sr = get_arg_i(emit, op_str, pn_args[1], 0, 255);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RSR(0, 3, 15, sr, r0));
+        } else if (op == MP_QSTR_xsr) {
+            mp_uint_t sr = get_arg_i(emit, op_str, pn_args[1], 0, 255);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RSR(0, 1, 6, sr, r0));
+        } else
+        #endif
+        {
             goto unknown_op;
         }

@@ -289,7 +405,72 @@ static void emit_inline_xtensa_op(emit_inline_asm_t *emit, qstr op, mp_uint_t n_
                 return;
             }
         }
-        goto unknown_op;
+
+        if (op == MP_QSTR_add_n) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t r2 = get_arg_reg(emit, op_str, pn_args[2]);
+            asm_xtensa_op16(&emit->as, ASM_XTENSA_ENCODE_RRRN(10, r0, r1, r2));
+        } else if (op == MP_QSTR_addi_n) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_int_t imm4 = get_arg_i(emit, op_str, pn_args[2], -1, 15);
+            asm_xtensa_op16(&emit->as, ASM_XTENSA_ENCODE_RRRN(11, r0, r1, (imm4 != 0 ? imm4 : -1)));
+        } else if (op == MP_QSTR_addmi) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_int_t imm8 = get_arg_i(emit, op_str, pn_args[2], -128 * 256, 127 * 256);
+            if ((imm8 & 0xFF) != 0) {
+                emit_inline_xtensa_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("%d is not a multiple of %d"), imm8, 256));
+            } else {
+                asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRI8(2, 13, r1, r0, imm8 >> 8));
+            }
+        } else if (op == MP_QSTR_bbci) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t bit = get_arg_i(emit, op_str, pn_args[1], 0, 31);
+            mp_int_t label = get_arg_label(emit, op_str, pn_args[2]);
+            asm_xtensa_bit_branch(&emit->as, r0, bit, label, 6);
+        } else if (op == MP_QSTR_bbsi) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t bit = get_arg_i(emit, op_str, pn_args[1], 0, 31);
+            mp_uint_t label = get_arg_label(emit, op_str, pn_args[2]);
+            asm_xtensa_bit_branch(&emit->as, r0, bit, label, 14);
+        } else if (op == MP_QSTR_slli) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t bits = 32 - get_arg_i(emit, op_str, pn_args[2], 1, 31);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 0 | ((bits >> 4) & 0x01), r0, r1, bits & 0x0F));
+        } else if (op == MP_QSTR_srai) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t bits = get_arg_i(emit, op_str, pn_args[2], 0, 31);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 2 | ((bits >> 4) & 0x01), r0, bits & 0x0F, r1));
+        } else if (op == MP_QSTR_srli) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t bits = get_arg_i(emit, op_str, pn_args[2], 0, 15);
+            asm_xtensa_op24(&emit->as, ASM_XTENSA_ENCODE_RRR(0, 1, 4, r0, bits, r1));
+        } else if (op == MP_QSTR_l32i_n) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t imm = get_arg_i(emit, op_str, pn_args[2], 0, 60);
+            if ((imm & 0x03) != 0) {
+                emit_inline_xtensa_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("%d is not a multiple of %d"), imm, 4));
+            } else {
+                asm_xtensa_op_l32i_n(&emit->as, r0, r1, imm >> 2);
+            }
+        } else if (op == MP_QSTR_s32i_n) {
+            mp_uint_t r0 = get_arg_reg(emit, op_str, pn_args[0]);
+            mp_uint_t r1 = get_arg_reg(emit, op_str, pn_args[1]);
+            mp_uint_t imm = get_arg_i(emit, op_str, pn_args[2], 0, 60);
+            if ((imm & 0x03) != 0) {
+                emit_inline_xtensa_error_exc(emit, mp_obj_new_exception_msg_varg(&mp_type_SyntaxError, MP_ERROR_TEXT("%d is not a multiple of %d"), imm, 4));
+            } else {
+                asm_xtensa_op_s32i_n(&emit->as, r0, r1, imm >> 2);
+            }
+        } else {
+            goto unknown_op;
+        }

     } else {
         goto unknown_op;
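
For context, here is a minimal usage sketch of some of the newly accepted opcodes as seen from Python. It is not part of the diff: it assumes an Xtensa port built with the inline assembler enabled, the function names are hypothetical, and arguments/return value follow the inline assembler's a2-a5/a2 register convention.

import micropython

@micropython.asm_xtensa
def scaled_sum(a2, a3):
    # a2 = (a2 * 4) + a3, using the newly accepted addx4 opcode
    addx4(a2, a2, a3)

@micropython.asm_xtensa
def clamp_negative(a2):
    # bgez is now dispatched through BCCZ_OPCODES:
    # skip the clear when a2 >= 0, otherwise return 0
    bgez(a2, DONE)
    movi(a2, 0)
    label(DONE)

On such a build, scaled_sum(3, 5) would be expected to return 17.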