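/*
 * PowerPC atomic operations for RTAI.
 *
 * In kernel space the xnarch_* interface maps straight onto the
 * Linux atomic primitives; in user space the lwarx/stwcx.-based
 * implementations below are used instead.
 */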
#ifndef _RTAI_ASM_PPC_ATOMIC_H
#define _RTAI_ASM_PPC_ATOMIC_H

#include <asm/atomic.h>

#ifdef __KERNEL__

#include <linux/bitops.h>
#include <asm/system.h>

#define atomic_xchg(ptr,v)       xchg(ptr,v)
#define atomic_cmpxchg(ptr,o,n)  cmpxchg(ptr,o,n)
#define xnarch_memory_barrier()  smp_mb()

void atomic_set_mask(unsigned long mask,
		     unsigned long *ptr);

#define xnarch_atomic_set(pcounter,i)          atomic_set(pcounter,i)
#define xnarch_atomic_get(pcounter)            atomic_read(pcounter)
#define xnarch_atomic_inc(pcounter)            atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)            atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)   atomic_inc_and_test(pcounter)
#define xnarch_atomic_dec_and_test(pcounter)   atomic_dec_and_test(pcounter)
#define xnarch_atomic_set_mask(pflags,mask)    atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)  atomic_clear_mask(mask,pflags)

#else /* !__KERNEL__ */

#include <asm/ppc_asm.h>

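/*
 * Compare-and-swap the word at ptr: if it holds the expected value
 * 'o', atomically replace it with 'n'.  lwarx loads the word and takes
 * out a reservation; stwcx. stores only if the reservation is still
 * held, otherwise bne- retries the loop.  PPC405_ERR77() inserts the
 * workaround for erratum #77 of the PPC405 before the stwcx. on
 * affected cores.  On SMP, the trailing sync turns a successful swap
 * into a full memory barrier.  Returns the previous value, so callers
 * can check whether the swap took place.
 */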
static inline unsigned long atomic_cmpxchg (volatile void *ptr,
					    unsigned long o,
					    unsigned long n)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n\
	cmpw	0,%0,%3 \n\
	bne	2f \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2 \n\
	bne-	1b\n"
#ifdef CONFIG_SMP
"	sync\n"
#endif
"2:"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)ptr)
	: "r" (ptr), "r" (o), "r" (n), "m" (*(volatile unsigned long *)ptr)
	: "cc", "memory");

	return prev;
}

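/*
 * Atomically swap the word at ptr with x, returning the previous
 * value.  This is the unconditional variant of the reservation loop
 * above: the store-conditional is always attempted and simply retried
 * until it succeeds.
 */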
static inline unsigned long atomic_xchg (volatile void *ptr,
					 unsigned long x)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)ptr)
	: "r" (ptr), "r" (x), "m" (*(volatile unsigned long *)ptr)
	: "cc", "memory");

	return prev;
}

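/*
 * Barrier suffixes for the asm templates: real sync/isync
 * instructions on SMP, empty on UP where the ordering is not needed.
 * Only SMP_ISYNC is used in this file, by the value-returning
 * operations below.
 */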
#ifdef CONFIG_SMP
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
#define SMP_SYNC	""
#define SMP_ISYNC
#endif

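/*
 * Atomically increment v->counter using the same lwarx/stwcx.
 * reservation loop; no return value and no implied barrier.
 */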
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

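/*
 * Increment v->counter and return the new value.  The SMP_ISYNC tail
 * keeps subsequent instructions from executing until the update has
 * succeeded, the usual isync acquire idiom on PowerPC.
 */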
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

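/*
 * Atomically decrement v->counter; like atomic_inc(), no return value
 * and no implied barrier.
 */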
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

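/*
 * Decrement v->counter and return the new value, with the same
 * SMP_ISYNC ordering tail as atomic_inc_return().
 */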
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

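/*
 * Atomically OR 'mask' into the word at ptr.  The asm hardwires r5 as
 * its scratch register, hence the explicit "r5" clobber.
 */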
static __inline__ void atomic_set_mask(unsigned long mask,
				       unsigned long *ptr)
{
	__asm__ __volatile__ ("\n\
1:	lwarx	5,0,%0 \n\
	or	5,5,%1\n"
	PPC405_ERR77(0,%0)
"	stwcx.	5,0,%0 \n\
	bne-	1b"
	:
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}

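/*
 * Atomically clear the bits of 'mask' in the word at ptr (AND with the
 * complement), using the same hardwired r5 scratch register.
 */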
static __inline__ void atomic_clear_mask(unsigned long mask,
					 unsigned long *ptr)
{
	__asm__ __volatile__ ("\n\
1:	lwarx	5,0,%0 \n\
	andc	5,5,%1\n"
	PPC405_ERR77(0,%0)
"	stwcx.	5,0,%0 \n\
	bne-	1b"
	:
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}

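/*
 * In user space xnarch_memory_barrier() is only a compiler barrier: it
 * stops the compiler from reordering accesses around it but emits no
 * sync instruction.
 */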
#define xnarch_memory_barrier()  __asm__ __volatile__("": : :"memory")

#define xnarch_atomic_set(pcounter,i)          (((pcounter)->counter) = (i))
#define xnarch_atomic_get(pcounter)            ((pcounter)->counter)
#define xnarch_atomic_inc(pcounter)            atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)            atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)   (atomic_inc_return(pcounter) == 0)
#define xnarch_atomic_dec_and_test(pcounter)   (atomic_dec_return(pcounter) == 0)
#define xnarch_atomic_set_mask(pflags,mask)    atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)  atomic_clear_mask(mask,pflags)

#endif /* __KERNEL__ */

typedef atomic_t atomic_counter_t;
typedef unsigned long atomic_flags_t;

#define xnarch_atomic_xchg(ptr,x)  atomic_xchg(ptr,x)
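
/*
 * Typical use of the counter interface (illustrative only):
 *
 *	atomic_counter_t refcnt;
 *
 *	xnarch_atomic_set(&refcnt, 1);
 *	...
 *	if (xnarch_atomic_dec_and_test(&refcnt))
 *		... last reference dropped, release the object ...
 */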

#endif /* !_RTAI_ASM_PPC_ATOMIC_H */