#ifndef _RTAI_ASM_PPC_HAL_H
#define _RTAI_ASM_PPC_HAL_H

#include <nucleus/asm-generic/hal.h>
#include <asm/div64.h>

typedef unsigned long long rthal_time_t;

/* Split a 64-bit value into its 32-bit halves; on big-endian PowerPC the
   first u_long of the overlaid pair holds the most significant word. */
#define __rthal_u64tou32(ull, h, l) ({ \
    union { unsigned long long _ull; \
        struct { u_long _h; u_long _l; } _s; } _u; \
    _u._ull = (ull); \
    (h) = _u._s._h; \
    (l) = _u._s._l; \
})

/* Rebuild a 64-bit value from its 32-bit halves. */
#define __rthal_u64fromu32(h, l) ({ \
    union { unsigned long long _ull; \
        struct { u_long _h; u_long _l; } _s; } _u; \
    _u._s._h = (h); \
    _u._s._l = (l); \
    _u._ull; \
})

static inline unsigned long long rthal_ullmul(const unsigned long m0,
                                              const unsigned long m1)
{
    return (unsigned long long) m0 * m1;
}

/* 64-bit by 32-bit division; the remainder is stored through rp when
   rp is non-NULL. */
static inline unsigned long long rthal_ulldiv (unsigned long long ull,
                                               const unsigned long uld,
                                               unsigned long *const rp)
{
#if defined(__KERNEL__) && BITS_PER_LONG == 32
    const unsigned long r = __div64_32(&ull, uld);
#else
    const unsigned long r = ull % uld;
    ull /= uld;
#endif

    if (rp)
        *rp = r;

    return ull;
}
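/*
 * Worked example (illustrative only): dividing 10 by 3,
 *
 *   unsigned long rem;
 *   unsigned long long q = rthal_ulldiv(10ULL, 3, &rem);
 *
 * yields q == 3 and rem == 1. Passing rp == NULL discards the remainder.
 */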

#define rthal_uldivrem(ull,ul,rp) ((u_long) rthal_ulldiv((ull),(ul),(rp)))

static inline int rthal_imuldiv (int i, int mult, int div) {

    /* Compute i * mult / div, using a 64-bit intermediate product so the
       multiplication cannot overflow. */
    const unsigned long long ull = rthal_ullmul(i, mult);
    return rthal_uldivrem(ull, div, NULL);
}
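/*
 * Worked example (illustrative only): rthal_imuldiv(1000, 3, 8) forms
 * 1000 * 3 = 3000 in 64-bit arithmetic, then divides to get 375, so the
 * intermediate product is safe even when i * mult exceeds 32 bits.
 */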

static inline __attribute_const__
unsigned long long __rthal_ullimd (const unsigned long long op,
                                   const unsigned long m,
                                   const unsigned long d)
{
    u_long oph, opl, tlh, tll, qh, rh, ql;
    unsigned long long th, tl;

    __rthal_u64tou32(op, oph, opl);
    tl = rthal_ullmul(opl, m);
    __rthal_u64tou32(tl, tlh, tll);
    th = rthal_ullmul(oph, m);
    th += tlh;

    qh = rthal_uldivrem(th, d, &rh);
    th = __rthal_u64fromu32(rh, tll);
    ql = rthal_uldivrem(th, d, NULL);
    return __rthal_u64fromu32(qh, ql);
}
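/*
 * The routine above evaluates (op * m) / d without a 128-bit intermediate:
 * with op = oph * 2^32 + opl, the product is op * m = th * 2^32 + tll,
 * where tl = opl * m = tlh * 2^32 + tll and th = oph * m + tlh. Dividing
 * th by d gives the high word qh and remainder rh; dividing rh * 2^32 + tll
 * by d gives the low word ql, so the result is qh * 2^32 + ql, assuming
 * the true quotient fits in 64 bits.
 */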

static inline long long rthal_llimd (long long op,
                                     unsigned long m,
                                     unsigned long d)
{
    if (op < 0LL)
        return -__rthal_ullimd(-op, m, d);

    return __rthal_ullimd(op, m, d);
}
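/*
 * Typical use (hypothetical names, not defined in this header): scaling a
 * timebase count to nanoseconds, for instance
 *
 *   long long ns = rthal_llimd(ticks, 1000000000, tb_freq);
 *
 * where tb_freq would be the timebase frequency in Hz.
 */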

static inline __attribute_const__ unsigned long ffnz (unsigned long ul) {

    __asm__ ("cntlzw %0, %1" : "=r" (ul) : "r" (ul & (-ul)));
    return 31 - ul;
}
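/*
 * ffnz() returns the index of the least significant set bit, e.g.
 * ffnz(0x90) == 4. The result is meaningless for ul == 0, so callers
 * must pass a non-zero argument.
 */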

#if defined(__KERNEL__) && !defined(__cplusplus)
#include <asm/system.h>
#include <asm/time.h>
#include <asm/timex.h>
#include <nucleus/asm/atomic.h>
#include <asm/processor.h>

#ifdef CONFIG_ADEOS_CORE
#define RTHAL_TIMER_IRQ   ADEOS_TIMER_VIRQ
#else
#define RTHAL_TIMER_IRQ   IPIPE_TIMER_VIRQ
#endif

#define rthal_irq_descp(irq)  (irq_desc + irq)

static inline unsigned long long rthal_rdtsc (void) {
    unsigned long long t;
    rthal_read_tsc(t);
    return t;
}
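/*
 * rthal_rdtsc() returns the free-running 64-bit count sampled by
 * rthal_read_tsc(), which is provided by the generic HAL layer and is
 * assumed here to read the PowerPC timebase.
 */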

#if defined(CONFIG_ADEOS_CORE) && !defined(CONFIG_ADEOS_NOTHREADS)

/* Derive the Linux task underlying the root (Linux) domain on the given
   CPU: the saved stack pointer of the root domain points into that task's
   8Kb kernel stack, at whose base the thread_info structure lives. */

static inline struct task_struct *rthal_root_host_task (int cpuid) {
    return ((struct thread_info *)(rthal_root_domain->esp[cpuid] & (~8191UL)))->task;
}

static inline struct task_struct *rthal_current_host_task (int cpuid)
{
    register unsigned long esp asm ("r1");

    /* If we are currently running on the real-time domain's private stack,
       report the root domain's host task; otherwise we are borrowing the
       stack of the preempted Linux task, which is simply current. */
    if (esp >= rthal_domain.estackbase[cpuid] && esp < rthal_domain.estackbase[cpuid] + 8192)
        return rthal_root_host_task(cpuid);

    return current;
}

#else /* !CONFIG_ADEOS_CORE || CONFIG_ADEOS_NOTHREADS */

static inline struct task_struct *rthal_root_host_task (int cpuid) {
    return current;
}

static inline struct task_struct *rthal_current_host_task (int cpuid) {
    return current;
}

#endif /* CONFIG_ADEOS_CORE && !CONFIG_ADEOS_NOTHREADS */

static inline void rthal_timer_program_shot (unsigned long delay)
{
    if (!delay)
        delay = 1;
#ifdef CONFIG_40x
    mtspr(SPRN_PIT, delay);
#else
    set_dec(delay);
#endif
}
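/*
 * One-shot timer programming: on 4xx cores the delay is loaded into the
 * Programmable Interval Timer (SPRN_PIT), otherwise into the decrementer
 * via set_dec(). A zero delay is bumped to 1 so the countdown always fires.
 */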

/* Context switch support: rthal_switch_context() is declared here and
   implemented by the arch support code; RTHAL_SWITCH_FRAME_SIZE is the
   size in bytes of the register frame it saves on the outgoing kernel
   stack. */

#define RTHAL_SWITCH_FRAME_SIZE  108

void rthal_switch_context(unsigned long *out_kspp,
                          unsigned long *in_kspp);

#ifdef CONFIG_RTAI_HW_FPU

typedef struct rthal_fpenv {

    /* FPU register save area; the field layout mirrors the FPU area of
       the PowerPC struct thread_struct (fpr[32], fpscr_pad, fpscr), so
       the same save/restore sequences apply to either structure. */

    double fpr[32];
    unsigned long fpscr_pad;
    unsigned long fpscr;

} rthal_fpenv_t;

void rthal_init_fpu(rthal_fpenv_t *fpuenv);

void rthal_save_fpu(rthal_fpenv_t *fpuenv);

void rthal_restore_fpu(rthal_fpenv_t *fpuenv);

#ifndef CONFIG_SMP
#define rthal_get_fpu_owner(cur) last_task_used_math
#else
#define rthal_get_fpu_owner(cur) ({ \
    struct task_struct *_cur = (cur); \
    ((_cur->thread.regs && (_cur->thread.regs->msr & MSR_FP)) \
     ? _cur : NULL); \
})
#endif

#define rthal_disable_fpu() ({ \
    register long _msr; \
    __asm__ __volatile__ ( "mfmsr %0" : "=r"(_msr) ); \
    __asm__ __volatile__ ( "mtmsr %0" \
                           : \
                           : "r"(_msr & ~(MSR_FP)) \
                           : "memory" ); \
})

#define rthal_enable_fpu() ({ \
    register long _msr; \
    __asm__ __volatile__ ( "mfmsr %0" : "=r"(_msr) ); \
    __asm__ __volatile__ ( "mtmsr %0" \
                           : \
                           : "r"(_msr | MSR_FP) \
                           : "memory" ); \
})
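/*
 * rthal_disable_fpu()/rthal_enable_fpu() clear or set MSR[FP] for the
 * current execution context, so a subsequent floating-point instruction
 * traps (or no longer traps) with an FP-unavailable exception.
 */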

#endif /* CONFIG_RTAI_HW_FPU */

static const char *const rthal_fault_labels[] = {
    [0] = "Data or instruction access",
    [1] = "Alignment",
    [2] = "Altivec unavailable",
    [3] = "Program check exception",
    [4] = "Machine check exception",
    [5] = "Unknown",
    [6] = "Instruction breakpoint",
    [7] = "Run mode exception",
    [8] = "Single-step exception",
    [9] = "Non-recoverable exception",
    [10] = "Software emulation",
    [11] = "Debug",
    [12] = "SPE",
    [13] = "Altivec assist",
    [14] = NULL
};

#endif /* __KERNEL__ && !__cplusplus */

#endif /* !_RTAI_ASM_PPC_HAL_H */