__futex_atomic_op() and arch_futex_atomic_op_inuser()

in arch/hexagon/include/asm/futex.h [14:70]
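
The __futex_atomic_op() macro below wraps one futex read-modify-write in a Hexagon load-locked/store-conditional loop. Label 1: loads the old value from the user address with memw_locked, the caller-supplied insn computes the new value into %1, and label 2: attempts the conditional store; if the reservation was lost (predicate p2 is clear), the sequence retries from 1:. On success, %1 (ret) is cleared to zero. The two __ex_table entries cover the user-memory accesses at 1: and 2:, so a fault at either point lands in the .fixup code at 4:, which sets ret to -EFAULT and jumps back to the normal exit at 3:. The "=&r" output constraints are early-clobber because both outputs are written before the last input is consumed; p2 and memory are clobbered.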


#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile( \
	"1: %0 = memw_locked(%3);\n" \
	    /* For example: %1 = %4 */ \
	    insn \
	"2: memw_locked(%3,p2) = %1;\n" \
	"   if (!p2) jump 1b;\n" \
	"   %1 = #0;\n" \
	"3:\n" \
	".section .fixup,\"ax\"\n" \
	"4: %1 = #%5;\n" \
	"   jump ##3b\n" \
	".previous\n" \
	".section __ex_table,\"a\"\n" \
	".long 1b,4b,2b,4b\n" \
	".previous\n" \
	: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
	: "p2", "memory")


static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
				  oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}
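
To make the per-op semantics concrete: on success the function returns 0 with the pre-op value in *oval, on a user-memory fault it returns -EFAULT, and for an unrecognized op it returns -ENOSYS. Below is a small user-space illustration of what each case computes (made-up sample values; apply_op() and the OP_* names are hypothetical stand-ins for the FUTEX_OP_* cases, not kernel API):

#include <stdio.h>

enum { OP_SET, OP_ADD, OP_OR, OP_ANDN, OP_XOR };

/* Mirrors the switch above: returns the new value written back to *uaddr,
 * while the old value is what arch_futex_atomic_op_inuser() reports via *oval. */
static int apply_op(int op, int old, int oparg)
{
	switch (op) {
	case OP_SET:  return oparg;		/* %1 = %4 */
	case OP_ADD:  return old + oparg;	/* %1 = add(%0,%4) */
	case OP_OR:   return old | oparg;	/* %1 = or(%0,%4) */
	case OP_ANDN: return old & ~oparg;	/* %1 = not(%4); %1 = and(%0,%1) */
	case OP_XOR:  return old ^ oparg;	/* %1 = xor(%0,%4) */
	}
	return 0;
}

int main(void)
{
	int old = 0x0f, oparg = 0xf0;

	/* With old = 0x0f, oparg = 0xf0: SET -> 0xf0, ADD -> 0xff,
	 * OR -> 0xff, ANDN -> 0x0f, XOR -> 0xff. */
	for (int op = OP_SET; op <= OP_XOR; op++)
		printf("op=%d old=%#x new=%#x\n", op, old, apply_op(op, old, oparg));
	return 0;
}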