OpenCores
URL https://opencores.org/ocsvn/or1k/or1k/trunk

Subversion Repositories or1k

[/] [or1k/] [tags/] [LINUX_2_4_26_OR32/] [linux/] [linux-2.4/] [include/] [asm-sh/] [system.h] - Blame information for rev 1765

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 1275 phoenix
#ifndef __ASM_SH_SYSTEM_H
2
#define __ASM_SH_SYSTEM_H
3
 
4
/*
5
 * Copyright (C) 1999, 2000  Niibe Yutaka  &  Kaz Kojima
6
 */
7
 
8
#include <linux/config.h>
9
#include <linux/kernel.h>
10
 
11
/*
12
 *      switch_to() should switch tasks to task nr n, first
13
 */
14
 
15
/*
 * Address-space limit holder for the user-access machinery
 * (NOTE(review): presumably consumed by get_fs()/set_fs() in
 * asm-sh/uaccess.h — confirm there).  Wrapped in a struct so it
 * cannot be mixed up with a plain integer.
 */
typedef struct {
        unsigned long seg;
} mm_segment_t;
18
 
19
#ifdef CONFIG_SMP
/* This SuperH port is uniprocessor only. */
#error no SMP SuperH
#else
/* Nothing to do before a context switch on UP. */
#define prepare_to_switch()     do { } while(0)
/*
 * Context switch from `prev' to `next'.
 *
 * The asm pushes the callee-saved state (gbr, pr, r8-r14) onto the
 * current kernel stack, stores the stack pointer into
 * prev->thread.sp (via r1) and the resume address (label 1) into
 * prev->thread.pc (via r2), then switches r15 to next->thread.sp
 * (via r6) and pushes r0 (holding `prev') onto the new stack.
 * It finally jumps to __switch_to with pr preloaded from
 * next->thread.pc (r7), so __switch_to "returns" into the new task,
 * which pops its saved r0 and registers at label 1.
 *
 * r4/r5 carry prev/next — NOTE(review): presumably the argument
 * registers for __switch_to per the SH C ABI; confirm against its
 * definition.  On resume, `last' receives the task we came from
 * (the value popped into r0, constraint "z").
 */
#define switch_to(prev,next,last) do { \
 register struct task_struct *__last; \
 register unsigned long *__ts1 __asm__ ("r1") = &prev->thread.sp; \
 register unsigned long *__ts2 __asm__ ("r2") = &prev->thread.pc; \
 register unsigned long *__ts4 __asm__ ("r4") = (unsigned long *)prev; \
 register unsigned long *__ts5 __asm__ ("r5") = (unsigned long *)next; \
 register unsigned long *__ts6 __asm__ ("r6") = &next->thread.sp; \
 register unsigned long __ts7 __asm__ ("r7") = next->thread.pc; \
 __asm__ __volatile__ (".balign 4\n\t" \
                       "stc.l   gbr, @-r15\n\t" \
                       "sts.l   pr, @-r15\n\t" \
                       "mov.l   r8, @-r15\n\t" \
                       "mov.l   r9, @-r15\n\t" \
                       "mov.l   r10, @-r15\n\t" \
                       "mov.l   r11, @-r15\n\t" \
                       "mov.l   r12, @-r15\n\t" \
                       "mov.l   r13, @-r15\n\t" \
                       "mov.l   r14, @-r15\n\t" \
                       "mov.l   r15, @r1        ! save SP\n\t" \
                       "mov.l   @r6, r15        ! change to new stack\n\t" \
                       "mov.l   %0, @-r15       ! push R0 onto new stack\n\t" \
                       "mova    1f, %0\n\t" \
                       "mov.l   %0, @r2 ! save PC\n\t" \
                       "mov.l   2f, %0\n\t" \
                       "jmp     @%0             ! call __switch_to\n\t" \
                       " lds    r7, pr  !  with return to new PC\n\t" \
                       ".balign 4\n"    \
                       "2:\n\t" \
                       ".long   " "__switch_to\n" \
                       "1:\n\t" \
                       "mov.l   @r15+, %0       ! pop R0 from new stack\n\t" \
                       "mov.l   @r15+, r14\n\t" \
                       "mov.l   @r15+, r13\n\t" \
                       "mov.l   @r15+, r12\n\t" \
                       "mov.l   @r15+, r11\n\t" \
                       "mov.l   @r15+, r10\n\t" \
                       "mov.l   @r15+, r9\n\t" \
                       "mov.l   @r15+, r8\n\t" \
                       "lds.l   @r15+, pr\n\t" \
                       "ldc.l   @r15+, gbr\n\t" \
                       :"=&z" (__last) \
                       :"0" (prev), \
                        "r" (__ts1), "r" (__ts2), \
                        "r" (__ts4), "r" (__ts5), "r" (__ts6), "r" (__ts7) \
                       :"r3", "t"); \
  last = __last; \
} while (0)
#endif
71
 
72
/* Emit a single no-op instruction. */
#define nop() __asm__ __volatile__ ("nop")


/*
 * Atomically exchange *ptr with x.  Dispatches on operand size to
 * __xchg() below and casts the old value back to the pointee type.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
76
 
77
/*
 * Atomic test-and-set on the byte at *m using the SH "tas.b"
 * instruction; "movt" then copies the resulting T flag into the
 * return value.  Per the SH ISA, tas.b sets T iff the byte was zero
 * and then sets its top bit, so this returns 1 when the lock byte
 * was previously clear.  Clobbers T; "memory" makes it a compiler
 * barrier.
 */
static __inline__ unsigned long tas(volatile int *m)
{ /* #define tas(ptr) (xchg((ptr),1)) */
        unsigned long retval;

        __asm__ __volatile__ ("tas.b    @%1\n\t"
                              "movt     %0"
                              : "=r" (retval): "r" (m): "t", "memory");
        return retval;
}
86
 
87
/*
 * Deliberately never defined: referencing it from __xchg() on an
 * unsupported operand size turns the misuse into a link-time error.
 */
extern void __xchg_called_with_bad_pointer(void);

/*
 * Memory barriers.  These expand to pure compiler barriers (empty
 * asm with a "memory" clobber) — no hardware fence instruction is
 * emitted here.
 */
#define mb()    __asm__ __volatile__ ("": : :"memory")
#define rmb()   mb()
#define wmb()   __asm__ __volatile__ ("": : :"memory")

#ifdef CONFIG_SMP
#define smp_mb()        mb()
#define smp_rmb()       rmb()
#define smp_wmb()       wmb()
#else
/* UP: only the compiler needs ordering. */
#define smp_mb()        barrier()
#define smp_rmb()       barrier()
#define smp_wmb()       barrier()
#endif

/* Assign then fence, for flag variables observed by other contexts. */
#define set_mb(var, value)  do { var = value; mb(); } while (0)
#define set_wmb(var, value) do { var = value; wmb(); } while (0)
105
 
106
/* Interrupt Control */
107
/* Interrupt Control */

/*
 * Enable interrupts: clear the IMASK field (SR bits 7:4), then OR in
 * the value held in the r6_bank control register.
 * NOTE(review): r6_bank presumably caches a per-CPU base/default SR
 * contribution set up at boot — confirm against the SH entry code.
 */
static __inline__ void __sti(void)
{
        unsigned long __dummy0, __dummy1;

        __asm__ __volatile__("stc       sr, %0\n\t"
                             "and       %1, %0\n\t"
                             "stc       r6_bank, %1\n\t"
                             "or        %1, %0\n\t"
                             "ldc       %0, sr"
                             : "=&r" (__dummy0), "=r" (__dummy1)
                             : "1" (~0x000000f0)
                             : "memory");
}
120
 
121
/*
 * Disable interrupts by setting all four IMASK bits (SR |= 0xf0).
 * The "z" constraint forces r0, because the "or #imm" instruction
 * form only operates on r0.
 */
static __inline__ void __cli(void)
{
        unsigned long __dummy;
        __asm__ __volatile__("stc       sr, %0\n\t"
                             "or        #0xf0, %0\n\t"
                             "ldc       %0, sr"
                             : "=&z" (__dummy)
                             : /* no inputs */
                             : "memory");
}
131
 
132
/*
 * Capture the current IMASK bits into x (x = SR & 0xf0).
 * 0 means interrupts enabled, 0xf0 means fully masked.
 */
#define __save_flags(x) \
        __asm__ __volatile__("stc sr, %0\n\t" \
                             "and #0xf0, %0"  \
                             : "=&z" (x) :/**/: "memory" )
136
 
137
/*
 * Atomically disable interrupts and return the previous IMASK bits
 * (SR & 0xf0) so they can later be handed to __restore_flags().
 * Done in one asm so no interrupt can slip between save and mask.
 */
static __inline__ unsigned long __save_and_cli(void)
{
        unsigned long flags, __dummy;

        __asm__ __volatile__("stc       sr, %1\n\t"
                             "mov       %1, %0\n\t"
                             "or        #0xf0, %0\n\t"
                             "ldc       %0, sr\n\t"
                             "mov       %1, %0\n\t"
                             "and       #0xf0, %0"
                             : "=&z" (flags), "=&r" (__dummy)
                             :/**/
                             : "memory" );
        return flags;
}
152
 
153
#ifdef DEBUG_CLI_STI
/*
 * Restore a flags value saved by __save_flags()/__save_and_cli().
 * Debug variant: if x asked for "disabled" but interrupts are found
 * fully enabled right now (saved flags == 0), something re-enabled
 * them behind our back — report it and force them off.
 */
static __inline__ void  __restore_flags(unsigned long x)
{
        if ((x & 0x000000f0) != 0x000000f0)
                __sti();
        else {
                unsigned long flags;
                __save_flags(flags);

                if (flags == 0) {
                        extern void dump_stack(void);
                        printk(KERN_ERR "BUG!\n");
                        dump_stack();
                        __cli();
                }
        }
}
#else
/*
 * Restore a saved flags value: re-enable interrupts only if they
 * were enabled at save time; if they were masked, leave SR alone
 * (they are assumed to still be masked).
 */
#define __restore_flags(x) do {                         \
        if ((x & 0x000000f0) != 0x000000f0)             \
                __sti();                                \
} while (0)
#endif
176
 
177
/*
 * Unconditional restore: unlike __restore_flags(), this also forces
 * interrupts OFF when x recorded them as masked, rather than
 * assuming they still are.
 */
#define really_restore_flags(x) do {                    \
        if ((x & 0x000000f0) != 0x000000f0)             \
                __sti();                                \
        else                                            \
                __cli();                                \
} while (0)
183
 
184
/*
 * Jump to P2 area.
 * When handling TLB or caches, we need to do it from P2 area.
 * (Implemented by OR-ing 0x20000000 into the address of local label
 * 2 and jumping there — same code, executed through the uncached
 * P2 mapping.)
 */
#define jump_to_P2()                    \
do {                                    \
        unsigned long __dummy;          \
        __asm__ __volatile__(           \
                "mov.l  1f, %0\n\t"     \
                "or     %1, %0\n\t"     \
                "jmp    @%0\n\t"        \
                " nop\n\t"              \
                ".balign 4\n"           \
                "1:     .long 2f\n"     \
                "2:"                    \
                : "=&r" (__dummy)       \
                : "r" (0x20000000));    \
} while (0)
202
 
203
/*
 * Back to P1 area.
 * Jumps to local label 2 via its linked (P1, cached) address.  The
 * seven leading nops — NOTE(review): presumably let outstanding
 * cache/TLB operations drain before leaving P2; confirm against the
 * SH hardware manual.
 */
#define back_to_P1()                                    \
do {                                                    \
        unsigned long __dummy;                          \
        __asm__ __volatile__(                           \
                "nop;nop;nop;nop;nop;nop;nop\n\t"       \
                "mov.l  1f, %0\n\t"                     \
                "jmp    @%0\n\t"                        \
                " nop\n\t"                              \
                ".balign 4\n"                           \
                "1:     .long 2f\n"                     \
                "2:"                                    \
                : "=&r" (__dummy));                     \
} while (0)
219
 
220
/*
 * Save the current IMASK state into x, then enable interrupts.
 * No trailing semicolon after while(0): the do/while wrapper must
 * expand to exactly one statement so that
 *     if (cond) local_irq_set(x); else ...
 * parses correctly.  (The original had a stray ';' here, which
 * breaks such uses with "else without if".)
 */
#define __save_and_sti(x)       do { __save_flags(x); __sti(); } while(0)

/* For spinlocks etc */
#define local_irq_save(x)       x = __save_and_cli()
#define local_irq_set(x)        __save_and_sti(x)
#define local_irq_restore(x)    __restore_flags(x)
#define local_irq_disable()     __cli()
#define local_irq_enable()      __sti()
228
 
229
#ifdef CONFIG_SMP
/*
 * Global (cross-CPU) interrupt control.  Unreachable on this port
 * (CONFIG_SMP triggers an #error earlier in this file) but kept for
 * API parity with other architectures.
 */
extern void __global_cli(void);
extern void __global_sti(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
#define cli() __global_cli()
#define sti() __global_sti()
#define save_flags(x) ((x)=__global_save_flags())
#define restore_flags(x) __global_restore_flags(x)
/* No trailing ';' after while(0): keeps the macro a single statement
 * so it is safe in if/else bodies (the original had a stray one). */
#define save_and_sti(x) do { save_flags(x); sti(); } while(0)
#else

/* UP: the global forms are just the local ones. */
#define cli() __cli()
#define sti() __sti()
#define save_flags(x) __save_flags(x)
#define save_and_cli(x) x = __save_and_cli()
#define save_and_sti(x) __save_and_sti(x)
#define restore_flags(x) __restore_flags(x)

#endif
250
 
251
/*
 * Exchange the 32-bit word at *m with val and return the old value.
 * Atomicity on this UP port is provided by masking interrupts around
 * the read-modify-write, not by a hardware atomic instruction.
 */
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
        unsigned long irq_state;
        unsigned long old_value;

        save_and_cli(irq_state);
        old_value = *m;
        *m = val;
        restore_flags(irq_state);

        return old_value;
}
261
 
262
/*
 * Exchange the byte at *m with the low 8 bits of val and return the
 * old byte.  As with xchg_u32(), atomicity comes from disabling
 * interrupts around the read-modify-write.
 */
static __inline__ unsigned long xchg_u8(volatile unsigned char * m, unsigned long val)
{
        unsigned long irq_state;
        unsigned long old_value;

        save_and_cli(irq_state);
        old_value = *m;
        *m = val & 0xff;
        restore_flags(irq_state);

        return old_value;
}
272
 
273
/*
 * Size dispatcher behind the xchg() macro.  Supports 4- and 1-byte
 * operands; any other size references the never-defined
 * __xchg_called_with_bad_pointer(), turning misuse into a link
 * error.  (The final return is unreachable in correct builds.)
 */
static __inline__ unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        if (size == 4)
                return xchg_u32(ptr, x);
        if (size == 1)
                return xchg_u8(ptr, x);

        __xchg_called_with_bad_pointer();
        return x;
}
286
 
287
/* XXX
 * disable hlt during certain critical i/o operations
 */
/* Advertise that this arch implements disable_hlt()/enable_hlt(). */
#define HAVE_DISABLE_HLT
void disable_hlt(void);
void enable_hlt(void);

#endif /* __ASM_SH_SYSTEM_H */

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.