#ifndef _RTAI_ASM_PPC_ATOMIC_H
#define _RTAI_ASM_PPC_ATOMIC_H

#include <asm/atomic.h>

#ifdef __KERNEL__

#include <linux/bitops.h>
#include <asm/system.h>

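/*
 * Kernel space: map the exchange and barrier services directly to the
 * native kernel primitives.
 */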
#define atomic_xchg(ptr,v)		xchg(ptr,v)
#define atomic_cmpxchg(ptr,o,n)		cmpxchg(ptr,o,n)
#define xnarch_memory_barrier()		smp_mb()

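/*
 * asm-ppc does not export atomic_set_mask(), so it is only declared
 * here; the definition is expected to be provided by the
 * arch-dependent support code.
 */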
void atomic_set_mask(unsigned long mask,
		     unsigned long *ptr);

#define xnarch_atomic_set(pcounter,i)		atomic_set(pcounter,i)
#define xnarch_atomic_get(pcounter)		atomic_read(pcounter)
#define xnarch_atomic_inc(pcounter)		atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)		atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)	atomic_inc_and_test(pcounter)
#define xnarch_atomic_dec_and_test(pcounter)	atomic_dec_and_test(pcounter)
#define xnarch_atomic_set_mask(pflags,mask)	atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)	atomic_clear_mask(mask,pflags)

#else /* !__KERNEL__ */

#include <linux/config.h>

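/*
 * Workaround for erratum #77 of the IBM PowerPC 405: insert a dcbt
 * right before each stwcx. so the store-conditional behaves
 * correctly. On other cores the macro expands to nothing.
 */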
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif

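/*
 * Atomically compare the word at ptr with o and, if they match, store
 * n there. Returns the previous value, so the operation succeeded iff
 * the return value equals o. Built on the lwarx/stwcx.
 * (load-reserve/store-conditional) pair; the loop retries whenever
 * the reservation is lost. On SMP, a trailing sync orders the update
 * before subsequent accesses.
 */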
static inline unsigned long atomic_cmpxchg (volatile void *ptr,
					    unsigned long o,
					    unsigned long n)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n\
	cmpw	0,%0,%3 \n\
	bne	2f \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2 \n\
	bne-	1b\n"
#ifdef CONFIG_SMP
"	sync\n"
#endif
"2:"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)ptr)
	: "r" (ptr), "r" (o), "r" (n), "m" (*(volatile unsigned long *)ptr)
	: "cc", "memory");

	return prev;
}

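/*
 * Atomically swap the word at ptr with x and return the previous
 * value, again using an lwarx/stwcx. retry loop.
 */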
static inline unsigned long atomic_xchg (volatile void *ptr,
					 unsigned long x)
{
	unsigned long prev;

	__asm__ __volatile__ ("\n\
1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "=m" (*(volatile unsigned long *)ptr)
	: "r" (ptr), "r" (x), "m" (*(volatile unsigned long *)ptr)
	: "cc", "memory");

	return prev;
}

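/*
 * On SMP, the value-returning atomics append an isync after the
 * successful stwcx. so that later loads cannot be speculated ahead of
 * the atomic update; on UP builds both barriers compile away.
 */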
#ifdef CONFIG_SMP
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
#define SMP_SYNC	""
#define SMP_ISYNC
#endif

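/* Atomically increment the counter, discarding the result. */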
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

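/* Atomically increment the counter and return the new value. */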
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

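/* Atomically decrement the counter, discarding the result. */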
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

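/* Atomically decrement the counter and return the new value. */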
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

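/*
 * Atomically OR mask into the word at ptr. Register r5 is hard-coded
 * as a scratch register, hence its presence in the clobber list.
 */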
static __inline__ void atomic_set_mask(unsigned long mask,
				       unsigned long *ptr)
{
	__asm__ __volatile__ ("\n\
1:	lwarx	5,0,%0 \n\
	or	5,5,%1\n"
	PPC405_ERR77(0,%0)
"	stwcx.	5,0,%0 \n\
	bne-	1b"
	:
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}

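/* Atomically clear the mask bits (AND-NOT) in the word at ptr. */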
static __inline__ void atomic_clear_mask(unsigned long mask,
					 unsigned long *ptr)
{
	__asm__ __volatile__ ("\n\
1:	lwarx	5,0,%0 \n\
	andc	5,5,%1\n"
	PPC405_ERR77(0,%0)
"	stwcx.	5,0,%0 \n\
	bne-	1b"
	:
	: "r" (ptr), "r" (mask)
	: "r5", "cc", "memory");
}

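/*
 * User space: xnarch_memory_barrier() is a compiler barrier only and
 * xnarch_atomic_set/get are plain accesses; note that no sync
 * instruction is emitted, so these do not order accesses across CPUs.
 */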
#define xnarch_memory_barrier()		__asm__ __volatile__("": : :"memory")

#define xnarch_atomic_set(pcounter,i)		(((pcounter)->counter) = (i))
#define xnarch_atomic_get(pcounter)		((pcounter)->counter)
#define xnarch_atomic_inc(pcounter)		atomic_inc(pcounter)
#define xnarch_atomic_dec(pcounter)		atomic_dec(pcounter)
#define xnarch_atomic_inc_and_test(pcounter)	(atomic_inc_return(pcounter) == 0)
#define xnarch_atomic_dec_and_test(pcounter)	(atomic_dec_return(pcounter) == 0)
#define xnarch_atomic_set_mask(pflags,mask)	atomic_set_mask(mask,pflags)
#define xnarch_atomic_clear_mask(pflags,mask)	atomic_clear_mask(mask,pflags)

#define cpu_relax()	xnarch_memory_barrier()

#endif /* __KERNEL__ */

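/*
 * Definitions below are shared by the kernel and user-space sections.
 * Usage sketch (the "refs" counter is hypothetical, for illustration
 * only):
 *
 *	atomic_counter_t refs;
 *	xnarch_atomic_set(&refs, 1);
 *	xnarch_atomic_inc(&refs);
 *	if (xnarch_atomic_dec_and_test(&refs))
 *		release_resource();	// hypothetical helper
 */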
typedef atomic_t atomic_counter_t;
typedef unsigned long atomic_flags_t;

#define xnarch_atomic_xchg(ptr,x)	atomic_xchg(ptr,x)

#endif /* !_RTAI_ASM_PPC_ATOMIC_H */