/*
 * static void tcg_out_op()
 * in tcg/ppc/tcg-target.c.inc [2475:3033]
 */

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGArg a0, a1, a2;

    switch (opc) {
    case INDEX_op_exit_tb:
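        /* Place the exit value in R3 (the ABI return-value register,
           read back by the caller of the generated code) and branch
           to the common epilogue.  */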
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R3, args[0]);
        tcg_out_b(s, 0, tcg_code_gen_epilogue);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump. */
            if (TCG_TARGET_REG_BITS == 64) {
                /* Ensure the next insns are 8-byte aligned. */
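                /* (The ADDIS/ADDI pair below is retargeted later with a
                   single aligned 8-byte store, hence the alignment.)  */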
                if ((uintptr_t)s->code_ptr & 7) {
                    tcg_out32(s, NOP);
                }
                s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
                tcg_out32(s, ADDIS | TAI(TCG_REG_TB, TCG_REG_TB, 0));
                tcg_out32(s, ADDI | TAI(TCG_REG_TB, TCG_REG_TB, 0));
            } else {
                s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
                tcg_out32(s, B);
                s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
                break;
            }
        } else {
            /* Indirect jump. */
            tcg_debug_assert(s->tb_jmp_insn_offset == NULL);
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TB, 0,
                       (intptr_t)(s->tb_jmp_target_addr + args[0]));
        }
        tcg_out32(s, MTSPR | RS(TCG_REG_TB) | CTR);
        tcg_out32(s, BCCTR | BO_ALWAYS);
        set_jmp_reset_offset(s, args[0]);
        if (USE_REG_TB) {
            /* For the unlinked case, need to reset TCG_REG_TB.  */
            tcg_out_mem_long(s, ADDI, ADD, TCG_REG_TB, TCG_REG_TB,
                             -tcg_current_code_size(s));
        }
        break;
    case INDEX_op_goto_ptr:
        tcg_out32(s, MTSPR | RS(args[0]) | CTR);
        if (USE_REG_TB) {
            tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, args[0]);
        }
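        /* Clear R3: if the target turns out to be the epilogue,
           this becomes the "no TB found" return value.  */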
        tcg_out32(s, ADDI | TAI(TCG_REG_R3, 0, 0));
        tcg_out32(s, BCCTR | BO_ALWAYS);
        break;
    case INDEX_op_br:
        {
            TCGLabel *l = arg_label(args[0]);
            uint32_t insn = B;

            if (l->has_value) {
                insn |= reloc_pc24_val(tcg_splitwx_to_rx(s->code_ptr),
                                       l->u.value_ptr);
            } else {
                tcg_out_reloc(s, s->code_ptr, R_PPC_REL24, l, 0);
            }
            tcg_out32(s, insn);
        }
        break;
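
    /* Loads and stores: tcg_out_mem_long emits the D-form (immediate
       displacement) opcode when the offset fits, and otherwise falls
       back to the X-form (register-indexed) opcode.  */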
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
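        /* Power has no sign-extending byte load: load zero-extended,
           then sign-extend in the register.  */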
        tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
        tcg_out_ext8s(s, args[0], args[0]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        tcg_out_mem_long(s, LHZ, LHZX, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
        tcg_out_mem_long(s, LHA, LHAX, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        tcg_out_mem_long(s, LWZ, LWZX, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_mem_long(s, LWA, LWAX, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        tcg_out_mem_long(s, LD, LDX, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        tcg_out_mem_long(s, STB, STBX, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        tcg_out_mem_long(s, STH, STHX, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        tcg_out_mem_long(s, STW, STWX, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        tcg_out_mem_long(s, STD, STDX, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
        do_addi_32:
            tcg_out_mem_long(s, ADDI, ADD, a0, a1, (int32_t)a2);
        } else {
            tcg_out32(s, ADD | TAB(a0, a1, a2));
        }
        break;
    case INDEX_op_sub_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[1]) {
            if (const_args[2]) {
                tcg_out_movi(s, TCG_TYPE_I32, a0, a1 - a2);
            } else {
                tcg_out32(s, SUBFIC | TAI(a0, a2, a1));
            }
        } else if (const_args[2]) {
            a2 = -a2;
            goto do_addi_32;
        } else {
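            /* subf computes RB - RA, so the operands are reversed.  */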
            tcg_out32(s, SUBF | TAB(a0, a2, a1));
        }
        break;

    case INDEX_op_and_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_andi32(s, a0, a1, a2);
        } else {
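            /* Logical insns write RA and read RS, hence SAB(a1, a0, a2).  */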
            tcg_out32(s, AND | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_and_i64:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_andi64(s, a0, a1, a2);
        } else {
            tcg_out32(s, AND | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_or_i64:
    case INDEX_op_or_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_ori32(s, a0, a1, a2);
        } else {
            tcg_out32(s, OR | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_xor_i64:
    case INDEX_op_xor_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_xori32(s, a0, a1, a2);
        } else {
            tcg_out32(s, XOR | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_andc_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_andi32(s, a0, a1, ~a2);
        } else {
            tcg_out32(s, ANDC | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_andc_i64:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out_andi64(s, a0, a1, ~a2);
        } else {
            tcg_out32(s, ANDC | SAB(a1, a0, a2));
        }
        break;
    case INDEX_op_orc_i32:
        if (const_args[2]) {
            tcg_out_ori32(s, args[0], args[1], ~args[2]);
            break;
        }
        /* FALLTHRU */
    case INDEX_op_orc_i64:
        tcg_out32(s, ORC | SAB(args[1], args[0], args[2]));
        break;
    case INDEX_op_eqv_i32:
        if (const_args[2]) {
            tcg_out_xori32(s, args[0], args[1], ~args[2]);
            break;
        }
        /* FALLTHRU */
    case INDEX_op_eqv_i64:
        tcg_out32(s, EQV | SAB(args[1], args[0], args[2]));
        break;
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
        tcg_out32(s, NAND | SAB(args[1], args[0], args[2]));
        break;
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
        tcg_out32(s, NOR | SAB(args[1], args[0], args[2]));
        break;

    case INDEX_op_clz_i32:
        tcg_out_cntxz(s, TCG_TYPE_I32, CNTLZW, args[0], args[1],
                      args[2], const_args[2]);
        break;
    case INDEX_op_ctz_i32:
        tcg_out_cntxz(s, TCG_TYPE_I32, CNTTZW, args[0], args[1],
                      args[2], const_args[2]);
        break;
    case INDEX_op_ctpop_i32:
        tcg_out32(s, CNTPOPW | SAB(args[1], args[0], 0));
        break;

    case INDEX_op_clz_i64:
        tcg_out_cntxz(s, TCG_TYPE_I64, CNTLZD, args[0], args[1],
                      args[2], const_args[2]);
        break;
    case INDEX_op_ctz_i64:
        tcg_out_cntxz(s, TCG_TYPE_I64, CNTTZD, args[0], args[1],
                      args[2], const_args[2]);
        break;
    case INDEX_op_ctpop_i64:
        tcg_out32(s, CNTPOPD | SAB(args[1], args[0], 0));
        break;

    case INDEX_op_mul_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out32(s, MULLI | TAI(a0, a1, a2));
        } else {
            tcg_out32(s, MULLW | TAB(a0, a1, a2));
        }
        break;

    case INDEX_op_div_i32:
        tcg_out32(s, DIVW | TAB(args[0], args[1], args[2]));
        break;

    case INDEX_op_divu_i32:
        tcg_out32(s, DIVWU | TAB(args[0], args[1], args[2]));
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            /* Limit immediate shift count lest we create an illegal insn.  */
            tcg_out_shli32(s, args[0], args[1], args[2] & 31);
        } else {
            tcg_out32(s, SLW | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            /* Limit immediate shift count lest we create an illegal insn.  */
            tcg_out_shri32(s, args[0], args[1], args[2] & 31);
        } else {
            tcg_out32(s, SRW | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_sari32(s, args[0], args[1], args[2]);
        } else {
            tcg_out32(s, SRAW | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_rlw(s, RLWINM, args[0], args[1], args[2], 0, 31);
        } else {
            tcg_out32(s, RLWNM | SAB(args[1], args[0], args[2])
                         | MB(0) | ME(31));
        }
        break;
    case INDEX_op_rotr_i32:
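        /* There is no rotate-right insn: rotate left by 32 - n instead.  */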
        if (const_args[2]) {
            tcg_out_rlw(s, RLWINM, args[0], args[1], 32 - args[2], 0, 31);
        } else {
            tcg_out32(s, SUBFIC | TAI(TCG_REG_R0, args[2], 32));
            tcg_out32(s, RLWNM | SAB(args[1], args[0], TCG_REG_R0)
                         | MB(0) | ME(31));
        }
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       arg_label(args[3]), TCG_TYPE_I32);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       arg_label(args[3]), TCG_TYPE_I64);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args);
        break;

    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
        tcg_out32(s, NEG | RT(args[0]) | RA(args[1]));
        break;

    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
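        /* NOT is NOR with both source operands the same.  */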
        tcg_out32(s, NOR | SAB(args[1], args[0], args[1]));
        break;

    case INDEX_op_add_i64:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
        do_addi_64:
            tcg_out_mem_long(s, ADDI, ADD, a0, a1, a2);
        } else {
            tcg_out32(s, ADD | TAB(a0, a1, a2));
        }
        break;
    case INDEX_op_sub_i64:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[1]) {
            if (const_args[2]) {
                tcg_out_movi(s, TCG_TYPE_I64, a0, a1 - a2);
            } else {
                tcg_out32(s, SUBFIC | TAI(a0, a2, a1));
            }
        } else if (const_args[2]) {
            a2 = -a2;
            goto do_addi_64;
        } else {
            tcg_out32(s, SUBF | TAB(a0, a2, a1));
        }
        break;

    case INDEX_op_shl_i64:
        if (const_args[2]) {
            /* Limit immediate shift count lest we create an illegal insn.  */
            tcg_out_shli64(s, args[0], args[1], args[2] & 63);
        } else {
            tcg_out32(s, SLD | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_shr_i64:
        if (const_args[2]) {
            /* Limit immediate shift count lest we create an illegal insn.  */
            tcg_out_shri64(s, args[0], args[1], args[2] & 63);
        } else {
            tcg_out32(s, SRD | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_sar_i64:
        if (const_args[2]) {
            tcg_out_sari64(s, args[0], args[1], args[2]);
        } else {
            tcg_out32(s, SRAD | SAB(args[1], args[0], args[2]));
        }
        break;
    case INDEX_op_rotl_i64:
        if (const_args[2]) {
            tcg_out_rld(s, RLDICL, args[0], args[1], args[2], 0);
        } else {
            tcg_out32(s, RLDCL | SAB(args[1], args[0], args[2]) | MB64(0));
        }
        break;
    case INDEX_op_rotr_i64:
        if (const_args[2]) {
            tcg_out_rld(s, RLDICL, args[0], args[1], 64 - args[2], 0);
        } else {
            tcg_out32(s, SUBFIC | TAI(TCG_REG_R0, args[2], 64));
            tcg_out32(s, RLDCL | SAB(args[1], args[0], TCG_REG_R0) | MB64(0));
        }
        break;

    case INDEX_op_mul_i64:
        a0 = args[0], a1 = args[1], a2 = args[2];
        if (const_args[2]) {
            tcg_out32(s, MULLI | TAI(a0, a1, a2));
        } else {
            tcg_out32(s, MULLD | TAB(a0, a1, a2));
        }
        break;
    case INDEX_op_div_i64:
        tcg_out32(s, DIVD | TAB(args[0], args[1], args[2]));
        break;
    case INDEX_op_divu_i64:
        tcg_out32(s, DIVDU | TAB(args[0], args[1], args[2]));
        break;

    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args, false);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args, true);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args, false);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args, true);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext16s(s, args[0], args[1]);
        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, args[0], args[1]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, TCG_TYPE_I32, args[3], args[0], args[1], args[2],
                        const_args[2]);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, TCG_TYPE_I64, args[3], args[0], args[1], args[2],
                        const_args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args, const_args);
        break;

    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
        tcg_out_bswap16(s, args[0], args[1], args[2]);
        break;
    case INDEX_op_bswap32_i32:
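        /* The 32-bit opcode carries no flags operand; pass 0.  */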
        tcg_out_bswap32(s, args[0], args[1], 0);
        break;
    case INDEX_op_bswap32_i64:
        tcg_out_bswap32(s, args[0], args[1], args[2]);
        break;
    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
        if (const_args[2]) {
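            /* The constraint allows only constant 0 here, so depositing
               reduces to clearing the field; 2u << (len - 1) avoids an
               undefined shift when len == 32.  */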
            uint32_t mask = ((2u << (args[4] - 1)) - 1) << args[3];
            tcg_out_andi32(s, args[0], args[0], ~mask);
        } else {
            tcg_out_rlw(s, RLWIMI, args[0], args[2], args[3],
                        32 - args[3] - args[4], 31 - args[3]);
        }
        break;
    case INDEX_op_deposit_i64:
        if (const_args[2]) {
            uint64_t mask = ((2ull << (args[4] - 1)) - 1) << args[3];
            tcg_out_andi64(s, args[0], args[0], ~mask);
        } else {
            tcg_out_rld(s, RLDIMI, args[0], args[2], args[3],
                        64 - args[3] - args[4]);
        }
        break;

    case INDEX_op_extract_i32:
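        /* Rotate right by the field position, then keep the low len bits.  */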
        tcg_out_rlw(s, RLWINM, args[0], args[1],
                    32 - args[2], 32 - args[3], 31);
        break;
    case INDEX_op_extract_i64:
        tcg_out_rld(s, RLDICL, args[0], args[1], 64 - args[2], 64 - args[3]);
        break;

    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, TCG_TYPE_I32, args[5], args[0], args[1], args[2],
                        args[3], args[4], const_args[2]);
        break;
    case INDEX_op_movcond_i64:
        tcg_out_movcond(s, TCG_TYPE_I64, args[5], args[0], args[1], args[2],
                        args[3], args[4], const_args[2]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add2_i64:
#else
    case INDEX_op_add2_i32:
#endif
        /* Note that the CA bit is defined based on the word size of the
           environment.  So in 64-bit mode it's always carry-out of bit 63.
           The fallback code using deposit works just as well for 32-bit.  */
        a0 = args[0], a1 = args[1];
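        /* Use R0 as the low-part destination if writing args[0] directly
           would clobber an input still needed for the high part.  */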
        if (a0 == args[3] || (!const_args[5] && a0 == args[5])) {
            a0 = TCG_REG_R0;
        }
        if (const_args[4]) {
            tcg_out32(s, ADDIC | TAI(a0, args[2], args[4]));
        } else {
            tcg_out32(s, ADDC | TAB(a0, args[2], args[4]));
        }
        if (const_args[5]) {
            tcg_out32(s, (args[5] ? ADDME : ADDZE) | RT(a1) | RA(args[3]));
        } else {
            tcg_out32(s, ADDE | TAB(a1, args[3], args[5]));
        }
        if (a0 != args[0]) {
            tcg_out_mov(s, TCG_TYPE_REG, args[0], a0);
        }
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_sub2_i64:
#else
    case INDEX_op_sub2_i32:
#endif
        a0 = args[0], a1 = args[1];
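        /* As for add2: avoid clobbering inputs of the high part.  */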
        if (a0 == args[5] || (!const_args[3] && a0 == args[3])) {
            a0 = TCG_REG_R0;
        }
        if (const_args[2]) {
            tcg_out32(s, SUBFIC | TAI(a0, args[4], args[2]));
        } else {
            tcg_out32(s, SUBFC | TAB(a0, args[4], args[2]));
        }
        if (const_args[3]) {
            tcg_out32(s, (args[3] ? SUBFME : SUBFZE) | RT(a1) | RA(args[5]));
        } else {
            tcg_out32(s, SUBFE | TAB(a1, args[5], args[3]));
        }
        if (a0 != args[0]) {
            tcg_out_mov(s, TCG_TYPE_REG, args[0], a0);
        }
        break;

    case INDEX_op_muluh_i32:
        tcg_out32(s, MULHWU | TAB(args[0], args[1], args[2]));
        break;
    case INDEX_op_mulsh_i32:
        tcg_out32(s, MULHW | TAB(args[0], args[1], args[2]));
        break;
    case INDEX_op_muluh_i64:
        tcg_out32(s, MULHDU | TAB(args[0], args[1], args[2]));
        break;
    case INDEX_op_mulsh_i64:
        tcg_out32(s, MULHD | TAB(args[0], args[1], args[2]));
        break;

    case INDEX_op_mb:
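        /* args[0] is a TCG_MO_* mask; tcg_out_mb selects a matching
           sync variant.  */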
        tcg_out_mb(s, args[0]);
        break;

    case INDEX_op_mov_i32:   /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:      /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
}