SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1()

in pcre/sljit/sljitNativeX86_common.c [1258:1425]


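/* Emits a single source operand operation: the MOV family (including the
   updating MOVU forms, which also write the effective address back into the
   base register of the memory operand), NOT, NEG and CLZ. */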
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 update = 0;
	sljit_s32 op_flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 dst_is_ereg = 0;
	sljit_s32 src_is_ereg = 0;
#else
#	define src_is_ereg 0
#endif

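	/* Argument checking and local offset adjustment; CHECK_EXTRA_REGS then
	   redirects virtual registers kept in stack slots on x86-32 to
	   SLJIT_MEM1(SLJIT_SP) accesses and records this in the *_is_ereg flags
	   (on x86-64 src_is_ereg is hard-wired to 0 above). */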
	CHECK_ERROR();
	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
	CHECK_EXTRA_REGS(src, srcw, src_is_ereg = 1);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op_flags & SLJIT_I32_OP;
#endif

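	/* The whole MOV/MOVU family is handled by the block below; the remaining
	   single operand ops (NOT, NEG, CLZ) fall through to the switch at the
	   end of the function. */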
	op = GET_OPCODE(op);
	if (op >= SLJIT_MOV && op <= SLJIT_MOVU_P) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif

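		/* For 32 bit operations only the low 32 bits of the result matter:
		   a register-to-itself move without a type cast is a no-op, a signed
		   32 bit load can be replaced by a plain (unsigned) 32 bit load, and
		   a 32 bit immediate can use the sign extending form. */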
		if (op_flags & SLJIT_I32_OP) {
			if (FAST_IS_REG(src) && src == dst) {
				if (!TYPE_CAST_NEEDED(op))
					return SLJIT_SUCCESS;
			}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (op == SLJIT_MOV_S32 && (src & SLJIT_MEM))
				op = SLJIT_MOV_U32;
			if (op == SLJIT_MOVU_S32 && (src & SLJIT_MEM))
				op = SLJIT_MOVU_U32;
			if (op == SLJIT_MOV_U32 && (src & SLJIT_IMM))
				op = SLJIT_MOV_S32;
			if (op == SLJIT_MOVU_U32 && (src & SLJIT_IMM))
				op = SLJIT_MOVU_S32;
#endif
		}

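		/* MOVU opcodes are defined exactly 8 above their MOV counterparts:
		   convert them to the plain MOV and remember that the address
		   register has to be updated with LEA. */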
		SLJIT_COMPILE_ASSERT(SLJIT_MOV + 8 == SLJIT_MOVU, movu_offset);
		if (op >= SLJIT_MOVU) {
			update = 1;
			op -= 8;
		}

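		/* Normalize immediates to the operand size, so the emitters below
		   receive an already truncated / sign extended value. On x86-32 a
		   constant stored into an emulated register can be moved directly
		   into its stack slot. */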
		if (src & SLJIT_IMM) {
			switch (op) {
			case SLJIT_MOV_U8:
				srcw = (sljit_u8)srcw;
				break;
			case SLJIT_MOV_S8:
				srcw = (sljit_s8)srcw;
				break;
			case SLJIT_MOV_U16:
				srcw = (sljit_u16)srcw;
				break;
			case SLJIT_MOV_S16:
				srcw = (sljit_s16)srcw;
				break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			case SLJIT_MOV_U32:
				srcw = (sljit_u32)srcw;
				break;
			case SLJIT_MOV_S32:
				srcw = (sljit_s32)srcw;
				break;
#endif
			}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			if (SLJIT_UNLIKELY(dst_is_ereg))
				return emit_mov(compiler, dst, dstw, src, srcw);
#endif
		}

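		/* MOVU with a memory source: update the base register to the
		   effective address with LEA first, then perform the load through
		   the updated base with a zero offset. Emulated (stack based)
		   sources and plain [reg] addresses are excluded. */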
		if (SLJIT_UNLIKELY(update) && (src & SLJIT_MEM) && !src_is_ereg && (src & REG_MASK) && (srcw != 0 || (src & OFFS_REG_MASK) != 0)) {
			inst = emit_x86_instruction(compiler, 1, src & REG_MASK, 0, src, srcw);
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			src &= SLJIT_MEM | 0xf;
			srcw = 0;
		}

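		/* On x86-32, when the destination is an emulated register (a stack
		   slot), anything other than a full width register/immediate move
		   goes through TMP_REG1 and is written back to the slot after the
		   switch below. */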
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
			SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
			dst = TMP_REG1;
		}
#endif

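		/* Dispatch to the emitter matching the operand size. */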
		switch (op) {
		case SLJIT_MOV:
		case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		case SLJIT_MOV_U32:
		case SLJIT_MOV_S32:
#endif
			FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U8:
			FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S8:
			FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U16:
			FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S16:
			FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
			break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		case SLJIT_MOV_U32:
			FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S32:
			FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
			break;
#endif
		}

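		/* Write TMP_REG1 back into the stack slot of the emulated
		   destination register. */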
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
			return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
#endif

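		/* MOVU with a memory destination: the store above already used the
		   full address, so only the base register update (LEA to the
		   effective address) remains. */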
		if (SLJIT_UNLIKELY(update) && (dst & SLJIT_MEM) && (dst & REG_MASK) && (dstw != 0 || (dst & OFFS_REG_MASK) != 0)) {
			inst = emit_x86_instruction(compiler, 1, dst & REG_MASK, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = LEA_r_m;
		}
		return SLJIT_SUCCESS;
	}

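	/* NOT, NEG and CLZ. If the operation sets status flags itself, the
	   previously saved flags are discarded; SLJIT_KEEP_FLAGS, on the other
	   hand, makes NEG and CLZ save the current flags before clobbering
	   them. */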
	if (SLJIT_UNLIKELY(GET_FLAGS(op_flags)))
		compiler->flags_saved = 0;

	switch (op) {
	case SLJIT_NOT:
		if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_E))
			return emit_not_with_flags(compiler, dst, dstw, src, srcw);
		return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);

	case SLJIT_NEG:
		if (SLJIT_UNLIKELY(op_flags & SLJIT_KEEP_FLAGS) && !compiler->flags_saved)
			FAIL_IF(emit_save_flags(compiler));
		return emit_unary(compiler, NEG_rm, dst, dstw, src, srcw);

	case SLJIT_CLZ:
		if (SLJIT_UNLIKELY(op_flags & SLJIT_KEEP_FLAGS) && !compiler->flags_saved)
			FAIL_IF(emit_save_flags(compiler));
		return emit_clz(compiler, op_flags, dst, dstw, src, srcw);
	}

	return SLJIT_SUCCESS;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#	undef src_is_ereg
#endif
}
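
A minimal call-site sketch (illustrative only; it assumes a compiler obtained
from sljit_create_compiler() and uses the public sljit operand macros):

	/* R0 = sign extended 16 bit load from [R1 + 8] */
	sljit_emit_op1(compiler, SLJIT_MOV_S16, SLJIT_R0, 0, SLJIT_MEM1(SLJIT_R1), 8);

	/* R2 = ~R2, also setting the E (equal/zero) flag */
	sljit_emit_op1(compiler, SLJIT_NOT | SLJIT_SET_E, SLJIT_R2, 0, SLJIT_R2, 0);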