satip-axe/kernel/arch/x86/include/asm/cmpxchg_64.h
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
						  (ptr), sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))
/*
 * On x86-64 a naturally aligned 64-bit store is already atomic, so a
 * plain assignment is sufficient here.
 */
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		asm volatile("xchgb %b0,%1"
			     : "=q" (x), "+m" (*__xg(ptr))
			     : "0" (x)
			     : "memory");
		break;
	case 2:
		asm volatile("xchgw %w0,%1"
			     : "=r" (x), "+m" (*__xg(ptr))
			     : "0" (x)
			     : "memory");
		break;
	case 4:
		asm volatile("xchgl %k0,%1"
			     : "=r" (x), "+m" (*__xg(ptr))
			     : "0" (x)
			     : "memory");
		break;
	case 8:
		asm volatile("xchgq %0,%1"
			     : "=r" (x), "+m" (*__xg(ptr))
			     : "0" (x)
			     : "memory");
		break;
	}
	return x;
}
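
/*
 * Illustrative sketch, not part of the original header: xchg() as a
 * test-and-set primitive for a minimal busy-wait lock. The function
 * and parameter names below are hypothetical.
 */
static inline void __example_busy_lock(volatile unsigned long *lock)
{
	/* xchg() returns the previous contents of *lock; the lock is ours
	 * once we swap in 1 and observe that the old value was 0. */
	while (xchg(lock, 1UL) != 0UL)
		; /* spin until the holder stores 0 again */
}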
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		asm volatile(LOCK_PREFIX "cmpxchgb %b2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "q"(new), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile(LOCK_PREFIX "cmpxchgw %w2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile(LOCK_PREFIX "cmpxchgl %k2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile(LOCK_PREFIX "cmpxchgq %2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
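
/*
 * Illustrative sketch, not part of the original header: the usual
 * compare-and-swap retry loop built on __cmpxchg(). The function name
 * is hypothetical; the cmpxchg() macro defined further down is the
 * normal entry point.
 */
static inline unsigned long __example_fetch_add(volatile unsigned long *p,
						unsigned long inc)
{
	unsigned long old, prev;

	do {
		old = *p;
		/* __cmpxchg() returns what was really in *p; the store
		 * took effect only if that value still equalled "old". */
		prev = __cmpxchg(p, old, old + inc, sizeof(*p));
	} while (prev != old);

	return old;	/* value observed just before the increment */
}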
/*
 * Always use locked operations when touching memory shared with a
 * hypervisor, since the system may be SMP even if the guest kernel
 * isn't. LOCK_PREFIX can be patched out on uniprocessor kernels, so
 * this variant hard-codes the "lock" prefix instead.
 */
static inline unsigned long __sync_cmpxchg(volatile void *ptr,
					   unsigned long old,
					   unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		asm volatile("lock; cmpxchgb %b2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "q"(new), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("lock; cmpxchgw %w2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("lock; cmpxchgl %k2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("lock; cmpxchgq %2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
/*
 * Non-locked variant: atomic only with respect to the local CPU
 * (e.g. for per-CPU data), not across CPUs.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		asm volatile("cmpxchgb %b2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "q"(new), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("cmpxchgw %w2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("cmpxchgl %k2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("cmpxchgq %2,%1"
			     : "=a"(prev), "+m"(*__xg(ptr))
			     : "r"(new), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
				       (unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg((ptr), (o), (n)); \
})

#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
					     (unsigned long)(n), \
					     sizeof(*(ptr))))

#define sync_cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o), \
					    (unsigned long)(n), \
					    sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg_local((ptr), (o), (n)); \
})
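
/*
 * Illustrative sketch, not part of the original header: because the
 * cmpxchg() wrapper casts its result back to __typeof__(*(ptr)), it can
 * be used directly on typed fields such as pointers. The structure and
 * function names below are hypothetical.
 */
struct __example_slot {
	void *owner;
};

static inline int __example_claim_slot(struct __example_slot *slot, void *me)
{
	/* The claim succeeds only if the slot was still NULL; the return
	 * value is whatever owner pointer was actually in memory. */
	return cmpxchg(&slot->owner, NULL, me) == NULL;
}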
#endif /* _ASM_X86_CMPXCHG_64_H */