SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags()

in pcre/sljit/sljitNativeMIPS_common.c [2004:2116]


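Materializes the condition selected by `type` (set by a preceding flag-setting
instruction) into `dst` as a 0/1 value. For the accumulating opcodes
(op >= SLJIT_ADD, e.g. SLJIT_OR) the bit is combined with the source operand
and the result is stored to `dst` instead of plainly overwriting it.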
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw,
	sljit_s32 type)
{
	sljit_s32 sugg_dst_ar, dst_ar;
	sljit_s32 flags = GET_ALL_FLAGS(op);
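	/* On MIPS-32 every integer access is word sized, so mem_type is a
	   compile time constant; on MIPS-64 it depends on SLJIT_I32_OP. */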
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#	define mem_type WORD_DATA
#else
	sljit_s32 mem_type = (op & SLJIT_I32_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

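	/* The result is discarded, so there is nothing to emit. */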
	if (dst == SLJIT_UNUSED)
		return SLJIT_SUCCESS;

	op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
	if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32)
		mem_type = INT_DATA | SIGNED_DATA;
#endif
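	/* A plain move into a register can compute the 0/1 value directly in
	   the destination; every other case builds it in TMP_REG2 first. */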
	sugg_dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;
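	/* For the accumulating forms a memory operand is preloaded into
	   TMP_REG1, so the final emit_op call only sees register operands. */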
	if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
		ADJUST_LOCAL_OFFSET(src, srcw);
		FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), src, srcw, dst, dstw));
		src = TMP_REG1;
		srcw = 0;
	}

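	/* Turn the requested condition into a 0/1 value; dst_ar is the
	   register that ends up holding it. */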
	switch (type & 0xff) {
	case SLJIT_EQUAL:
	case SLJIT_NOT_EQUAL:
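		/* EQUAL_FLAG is zero iff the operands were equal; SLTIU rd,
		   EQUAL_FLAG, 1 converts that into a 0/1 value. */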
		FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;
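	/* The following conditions were already computed as 0/1 values in
	   dedicated flag registers by the preceding flag setting instruction,
	   so the register can be used directly. */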
	case SLJIT_LESS:
	case SLJIT_GREATER_EQUAL:
	case SLJIT_LESS_F64:
	case SLJIT_GREATER_EQUAL_F64:
		dst_ar = ULESS_FLAG;
		break;
	case SLJIT_GREATER:
	case SLJIT_LESS_EQUAL:
	case SLJIT_GREATER_F64:
	case SLJIT_LESS_EQUAL_F64:
		dst_ar = UGREATER_FLAG;
		break;
	case SLJIT_SIG_LESS:
	case SLJIT_SIG_GREATER_EQUAL:
		dst_ar = LESS_FLAG;
		break;
	case SLJIT_SIG_GREATER:
	case SLJIT_SIG_LESS_EQUAL:
		dst_ar = GREATER_FLAG;
		break;
	case SLJIT_OVERFLOW:
	case SLJIT_NOT_OVERFLOW:
		dst_ar = OVERFLOW_FLAG;
		break;
	case SLJIT_MUL_OVERFLOW:
	case SLJIT_MUL_NOT_OVERFLOW:
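		/* Unlike the other flags, OVERFLOW_FLAG is zero here when the
		   multiplication did NOT overflow, so SLTIU computes the inverted
		   condition; flipping the type bit restores the polarity. */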
		FAIL_IF(push_inst(compiler, SLTIU | SA(OVERFLOW_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		type ^= 0x1; /* Flip type bit for the XORI below. */
		break;
	case SLJIT_EQUAL_F64:
	case SLJIT_NOT_EQUAL_F64:
		dst_ar = EQUAL_FLAG;
		break;

	case SLJIT_UNORDERED_F64:
	case SLJIT_ORDERED_F64:
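		/* CFC1 copies the FCSR into a general register; bit 23 is the FP
		   condition bit set by the preceding C.cond.fmt compare. */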
		FAIL_IF(push_inst(compiler, CFC1 | TA(sugg_dst_ar) | DA(FCSR_REG), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, SRL | TA(sugg_dst_ar) | DA(sugg_dst_ar) | SH_IMM(23), sugg_dst_ar));
		FAIL_IF(push_inst(compiler, ANDI | SA(sugg_dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
		break;

	default:
		SLJIT_ASSERT_STOP();
		dst_ar = sugg_dst_ar;
		break;
	}

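	/* The negated conditions have odd type values; XORI flips the bit. */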
	if (type & 0x1) {
		FAIL_IF(push_inst(compiler, XORI | SA(dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
		dst_ar = sugg_dst_ar;
	}

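	/* Accumulating form: emit_op expects the flag value in TMP_REG2 and
	   combines it with the source operand. */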
	if (op >= SLJIT_ADD) {
		if (DR(TMP_REG2) != dst_ar)
			FAIL_IF(push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
		return emit_op(compiler, op | flags, mem_type | CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0);
	}

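	/* Plain move: store the 0/1 value, or copy it into the destination
	   register if it was produced elsewhere. */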
	if (dst & SLJIT_MEM)
		return emit_op_mem(compiler, mem_type, dst_ar, dst, dstw);

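	/* ADDU with the zero register is the canonical MIPS register move. */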
	if (sugg_dst_ar != dst_ar)
		return push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | DA(sugg_dst_ar), sugg_dst_ar);
	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#	undef mem_type
#endif
}
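
For context, a minimal usage sketch. This assumes the sljit API of this era,
where sljit_emit_op_flags still takes a source operand and flag requests use
the SLJIT_SET_* bits: an unsigned subtraction fills ULESS_FLAG/UGREATER_FLAG,
which the SLJIT_LESS case above then reads back:

/* R0 = (R0 < R1), unsigned. SLJIT_SET_U asks the subtraction to compute
   the unsigned comparison flags; the arithmetic result is discarded. */
sljit_emit_op2(compiler, SLJIT_SUB | SLJIT_SET_U,
	SLJIT_UNUSED, 0, SLJIT_R0, 0, SLJIT_R1, 0);

/* Materialize the flag; with SLJIT_MOV the source operand must be unused. */
sljit_emit_op_flags(compiler, SLJIT_MOV,
	SLJIT_R0, 0, SLJIT_UNUSED, 0, SLJIT_LESS);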