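/*
 * RTAI atomic operations for the PowerPC 64-bit architecture.
 *
 * Kernel builds map the xnarch_atomic_*() interface onto the kernel's
 * own atomic helpers; user-space builds supply lock-free replacements
 * built on the PowerPC load-and-reserve/store-conditional instruction
 * pair (ldarx/stdcx.).
 */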
#ifndef _RTAI_ASM_PPC64_ATOMIC_H
#define _RTAI_ASM_PPC64_ATOMIC_H

#include <asm/atomic.h>

/* Atomically OR @mask into the 64-bit word at @ptr. */
static __inline__ void atomic_set_mask(unsigned long mask,
				       unsigned long *ptr)
{
	__asm__ __volatile__(
"1:	ldarx	5,0,%0\n"	/* load the long and take a reservation */
"	or	5,5,%1\n"	/* set the requested bits */
"	stdcx.	5,0,%0\n"	/* store back if still reserved */
"	bne-	1b"		/* retry if the reservation was lost */
	: /* no outputs */
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}

/* Atomically clear the @mask bits in the 64-bit word at @ptr. */
static __inline__ void atomic_clear_mask(unsigned long mask,
					 unsigned long *ptr)
{
	__asm__ __volatile__(
"1:	ldarx	5,0,%0\n"	/* load the long and take a reservation */
"	andc	5,5,%1\n"	/* clear the requested bits */
"	stdcx.	5,0,%0\n"	/* store back if still reserved */
"	bne-	1b"		/* retry if the reservation was lost */
	: /* no outputs */
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}
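
/*
 * Usage sketch (hypothetical flag word, not part of this header):
 *
 *	static unsigned long pending_events;
 *
 *	atomic_set_mask(0x4UL, &pending_events);	// raise bit 2
 *	atomic_clear_mask(0x4UL, &pending_events);	// acknowledge it
 *
 * Both calls are safe against concurrent updaters of the same word: a
 * racing store between ldarx and stdcx. cancels the reservation and
 * forces the loop to retry.
 */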

#ifdef __KERNEL__

#include <linux/bitops.h>
#include <asm/system.h>

#define atomic_xchg(ptr,v)       xchg(ptr,v)
#define atomic_cmpxchg(ptr,o,n)  cmpxchg(ptr,o,n)
#define xnarch_memory_barrier()  smp_mb()

/* atomic_set_mask() and atomic_clear_mask() are provided by the
   inline definitions above. */

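/*
 * Kernel-space mapping of the xnarch_atomic_*() interface: counter
 * operations act on an atomic_t (atomic_counter_t below); the mask
 * operations act on a plain unsigned long (atomic_flags_t below).
 */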
#define xnarch_atomic_set(pcounter,i)          atomic_set(pcounter,i)
#define xnarch_atomic_get(pcounter)            atomic_read(pcounter)
#define xnarch_atomic_inc(pcounter)            atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)            atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)   atomic_inc_and_test(pcounter)
#define xnarch_atomic_dec_and_test(pcounter)   atomic_dec_and_test(pcounter)
#define xnarch_atomic_set_mask(pflags,mask)    atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)  atomic_clear_mask(mask,pflags)

#else /* !__KERNEL__ */

#include <linux/config.h>

#ifdef CONFIG_SMP
#define EIEIO_ON_SMP    "eieio\n"
#define ISYNC_ON_SMP    "\n\tisync"
#else
#define EIEIO_ON_SMP
#define ISYNC_ON_SMP
#endif
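
/*
 * On SMP, "eieio" ahead of the reservation loop orders earlier stores
 * before the atomic update (release semantics), while "isync" after
 * the successful store-conditional prevents later instructions from
 * executing early (acquire semantics). Uniprocessor builds need
 * neither barrier.
 */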
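
/*
 * Atomic compare-and-exchange: if *p equals @old, store @new into *p.
 * Returns the value of *p observed before the attempt, so the caller
 * can tell whether the exchange happened (prev == old) or not.
 */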
static __inline__ unsigned long
atomic_cmpxchg(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"
	: "=&r" (prev), "=m" (*p)
	: "r" (p), "r" (old), "r" (new), "m" (*p)
	: "cc", "memory");

	return prev;
}

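/*
 * A minimal usage sketch (hypothetical helper, not part of this
 * header): a fetch-and-add built on top of atomic_cmpxchg():
 *
 *	static __inline__ unsigned long
 *	atomic_add_return_long(volatile unsigned long *p, unsigned long delta)
 *	{
 *		unsigned long old;
 *
 *		do
 *			old = *p;
 *		while (atomic_cmpxchg(p, old, old + delta) != old);
 *
 *		return old + delta;
 *	}
 */

/*
 * Atomic exchange: store @val into *m and return the value previously
 * held there.
 */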
static __inline__ unsigned long
atomic_xchg(volatile unsigned long *m, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%3		# __xchg_u64\n\
	stdcx.	%2,0,%3\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (prev), "=m" (*m)
	: "r" (val), "r" (m)
	: "cc", "memory");

	return prev;
}
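
/*
 * Usage sketch (hypothetical, not part of this header): a simple
 * test-and-set lock for user space built on atomic_xchg():
 *
 *	static volatile unsigned long lock_word;
 *
 *	static __inline__ void lock_acquire(void)
 *	{
 *		while (atomic_xchg(&lock_word, 1UL) != 0UL)
 *			cpu_relax();	// defined below for user space
 *	}
 *
 *	static __inline__ void lock_release(void)
 *	{
 *		xnarch_memory_barrier();
 *		lock_word = 0UL;
 *	}
 */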

#define xnarch_memory_barrier()  __asm__ __volatile__("sync" : : : "memory")

#define xnarch_atomic_set(pcounter,i)          (((pcounter)->counter) = (i))
#define xnarch_atomic_get(pcounter)            ((pcounter)->counter)
#define xnarch_atomic_inc(pcounter)            atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)            atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)   (atomic_inc_return(pcounter) == 0)
#define xnarch_atomic_dec_and_test(pcounter)   (atomic_dec_return(pcounter) == 0)
#define xnarch_atomic_set_mask(pflags,mask)    atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)  atomic_clear_mask(mask,pflags)

#define cpu_relax()  xnarch_memory_barrier()

#endif /* __KERNEL__ */
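
/*
 * Definitions shared by kernel and user-space builds: counters are
 * handled through the atomic_t-based operations, flag words through
 * the mask operations above.
 */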
typedef atomic_t atomic_counter_t;
typedef unsigned long atomic_flags_t;

#define xnarch_atomic_xchg(ptr,x)  atomic_xchg(ptr,x)

#endif /* !_RTAI_ASM_PPC64_ATOMIC_H */