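/*
 * Atomic operations and memory barriers for the RTAI IA-64 architecture
 * support layer.  Kernel-space builds map onto the native ia64 intrinsics,
 * while user-space builds carry their own xchg8/cmpxchg8.acq based helpers;
 * the xnarch_atomic_* wrappers at the bottom are shared by both.
 */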
#ifndef _RTAI_ASM_IA64_ATOMIC_H
#define _RTAI_ASM_IA64_ATOMIC_H

#ifdef __KERNEL__

#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/system.h>

/* Kernel space: map onto the native IA-64 8-byte xchg/cmpxchg intrinsics. */
#define atomic_xchg(ptr,v)      ia64_xchg8(ptr,v)
#define atomic_cmpxchg(ptr,o,n) ia64_cmpxchg8_acq(ptr,o,n)
#define xnarch_memory_barrier() smp_mb()

#else /* !__KERNEL__ */

/* User space: define likely()/unlikely(); gcc releases older than 2.96
   lack __builtin_expect(), so stub it out for them. */
#ifndef likely
#if __GNUC__ == 2 && __GNUC_MINOR__ < 96
#define __builtin_expect(x, expected_value) (x)
#endif
#define likely(x)   __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#endif

#define fls(x) generic_fls(x)

#include <linux/bitops.h>
#include <asm/atomic.h>

/* Dummy type used to make gcc treat the operand as a memory reference of
   unspecified size (same trick as the kernel's __xg() helper). */
struct __rtai_xchg_dummy { unsigned long a[100]; };
#define __rtai_xg(x) ((struct __rtai_xchg_dummy *)(x))

/* Atomically exchange the 8-byte value at *ptr with x; return the old value. */
static inline unsigned long atomic_xchg (volatile void *ptr,
					 unsigned long x)
{
	__u64 ia64_intri_res;
	__asm__ __volatile__ ("xchg8 %0=[%1],%2" : "=r" (ia64_intri_res)
			      : "r" (ptr), "r" (x) : "memory");
	return ia64_intri_res;
}

/* Atomically compare-and-swap the 8-byte value at *ptr: if it equals o,
   store n (with acquire semantics); return the value that was read. */
static inline unsigned long atomic_cmpxchg (volatile void *ptr,
					    unsigned long o,
					    unsigned long n)
{
	__u64 ia64_intri_res;

	__asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(o));
	__asm__ __volatile__ ("cmpxchg8.acq %0=[%1],%2,ar.ccv":
			      "=r"(ia64_intri_res) : "r"(ptr), "r"(n) : "memory");

	return ia64_intri_res;
}

/* User space: xnarch_memory_barrier() is a compiler barrier only. */
#define xnarch_memory_barrier() __asm__ __volatile__("": : :"memory")

#define cpu_relax() __asm__ __volatile__ ("hint @pause" ::: "memory")

#undef ADDR

#endif /* __KERNEL__ */

typedef atomic_t atomic_counter_t;
typedef unsigned long atomic_flags_t;

/* Common wrappers mapping the xnarch_atomic_* interface onto the atomic
   operations defined above. */
#define xnarch_atomic_set(pcounter,i)          atomic_set(pcounter,i)
#define xnarch_atomic_get(pcounter)            atomic_read(pcounter)
#define xnarch_atomic_inc(pcounter)            atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)            atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)   atomic_inc_and_test(pcounter)
#define xnarch_atomic_dec_and_test(pcounter)   atomic_dec_and_test(pcounter)

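/*
 * Illustrative sketch only (hypothetical object and function names): a
 * simple reference count built on the xnarch_atomic_* wrappers above.
 *
 *	struct my_object {
 *		atomic_counter_t refcount;
 *	};
 *
 *	static inline void my_object_get(struct my_object *obj)
 *	{
 *		xnarch_atomic_inc(&obj->refcount);
 *	}
 *
 *	// Returns non-zero when the last reference has just been dropped.
 *	static inline int my_object_put(struct my_object *obj)
 *	{
 *		return xnarch_atomic_dec_and_test(&obj->refcount);
 *	}
 */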
/* Atomically OR `mask' into the 32-bit word at `addr', using an
   acquire-ordered compare-and-swap loop. */
static inline void atomic_set_mask(unsigned mask, unsigned long *addr)
{
	__u32 old, new;
	volatile __u32 *m;
	CMPXCHG_BUGCHECK_DECL

	m = (volatile __u32 *) addr;
	do {
		CMPXCHG_BUGCHECK(m);
		old = *m;
		new = old | mask;
	} while (cmpxchg_acq(m, old, new) != old);
}

/* Atomically clear the bits of `mask' in the 32-bit word at `addr', using
   an acquire-ordered compare-and-swap loop. */
static inline void atomic_clear_mask(unsigned mask, unsigned long *addr)
{
	__u32 old, new;
	volatile __u32 *m;
	CMPXCHG_BUGCHECK_DECL

	m = (volatile __u32 *) addr;
	do {
		CMPXCHG_BUGCHECK(m);
		old = *m;
		new = old & ~mask;
	} while (cmpxchg_acq(m, old, new) != old);
}

/* Note the argument order: the xnarch_* forms take the flags word first,
   while the underlying helpers take the mask first. */
#define xnarch_atomic_set_mask(pflags,mask)   atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask) atomic_clear_mask(mask,pflags)

#define xnarch_atomic_xchg(ptr,x) atomic_xchg(ptr,x)
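
/*
 * Illustrative sketch only (hypothetical variable names): manipulating an
 * event mask held in an atomic_flags_t with the wrappers above.
 *
 *	atomic_flags_t pending;
 *	unsigned long old;
 *
 *	xnarch_atomic_set_mask(&pending, 0x1);	  // mark event 0 pending
 *	xnarch_atomic_clear_mask(&pending, 0x1);  // acknowledge event 0
 *	old = xnarch_atomic_xchg(&pending, 0);	  // atomically drain all events
 */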

#endif /* !_RTAI_ASM_IA64_ATOMIC_H */