/*
 * or1k/trunk/uclinux/uClinux-2.0.x/include/asm-m68knommu/system.h (rev 199)
 * https://opencores.org/ocsvn/or1k/or1k/trunk
 */
#ifndef _M68K_SYSTEM_H
#define _M68K_SYSTEM_H

#include <linux/config.h> /* get configuration macros */
#include <linux/linkage.h>
#include <asm/segment.h>

#ifdef CONFIG_M68328
#include <asm/MC68328.h>
#endif

#ifdef CONFIG_M68EZ328
#include <asm/MC68EZ328.h>
#endif

/* Read the user stack pointer (kept in the software variable sw_usp on ColdFire). */
extern inline unsigned long rdusp(void) {
#ifdef CONFIG_COLDFIRE
        extern unsigned int     sw_usp;
        return(sw_usp);
#else
        unsigned long usp;
        __asm__ __volatile__("move %/usp,%0"
                             : "=a" (usp));
        return usp;
#endif
}

/* Write the user stack pointer. */
extern inline void wrusp(unsigned long usp) {
#ifdef CONFIG_COLDFIRE
        extern unsigned int     sw_usp;
        sw_usp = usp;
#else
        __asm__ __volatile__("move %0,%/usp"
                             :
                             : "a" (usp));
#endif
}

/* Read address register a5. */
extern inline unsigned long rda5(void) {
        unsigned long a5;

        __asm__ __volatile__("movel %/a5,%0"
                             : "=a" (a5));
        return a5;
}

/* Write address register a5. */
extern inline void wra5(unsigned long a5) {
        __asm__ __volatile__("movel %0,%/a5"
                             :
                             : "a" (a5));
}
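
/*
 * Illustrative sketch only (not part of the original header): how low-level
 * code might use rdusp()/wrusp() to reserve space on the user stack.  The
 * function name and the 32-byte adjustment are assumptions for illustration.
 */
#if 0
static void example_push_user_frame(void)
{
        unsigned long usp = rdusp();    /* current user stack pointer */

        usp -= 32;                      /* reserve room on the user stack */
        wrusp(usp);                     /* install the adjusted pointer */
}
#endif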

/*
 * switch_to(n) should switch tasks to task ptr, first checking that
 * ptr isn't the current task, in which case it does nothing.  This
 * also clears the TS-flag if the task we switched to last used the
 * math co-processor.
 */
/*
 * switch_to() saves the extra registers that are not saved
 * automatically by SAVE_SWITCH_STACK in resume(), i.e. d0-d5 and
 * a0-a1. Some of these are used by schedule() and its predecessors
 * and so we might see unexpected behaviors when a task returns
 * with unexpected register values.
 *
 * syscall stores these registers itself and none of them are used
 * by syscall after the function in the syscall has been called.
 *
 * Beware that resume now expects *next to be in d1 and the offset of
 * tss to be in a1. This saves a few instructions as we no longer have
 * to push them onto the stack and read them back right after.
 *
 * 02/17/96 - Jes Sorensen (jds@kom.auc.dk)
 *
 * Changed 96/09/19 by Andreas Schwab
 * pass prev in a0, next in a1, offset of tss in d1, and whether
 * the mm structures are shared in d2 (to avoid atc flushing).
 */
asmlinkage void resume(void);
#define switch_to(prev,next) { \
  register void *_prev __asm__ ("a0") = (prev); \
  register void *_next __asm__ ("a1") = (next); \
  register int _tssoff __asm__ ("d1") = (int)&((struct task_struct *)0)->tss; \
  register char _shared __asm__ ("d2") = ((prev)->mm == (next)->mm); \
  __asm__ __volatile__("jbsr " SYMBOL_NAME_STR(resume) "\n\t" \
                       : : "a" (_prev), "a" (_next), "d" (_tssoff), \
                           "d" (_shared) \
                       : "d0", "d1", "d2", "d3", "d4", "d5", "a0", "a1"); \
}
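
/*
 * Illustrative sketch only (not part of the original header): how a
 * scheduler's task-switch step might invoke switch_to(), which hands
 * control to resume() with prev in a0 and next in a1 as described above.
 * example_switch() is an assumption for illustration.
 */
#if 0
static void example_switch(struct task_struct *prev, struct task_struct *next)
{
        if (prev != next)
                switch_to(prev, next);
}
#endif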

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
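
/*
 * Illustrative sketch only (not part of the original header): tas() as a
 * test-and-set primitive for a trivial busy-wait lock.  The names
 * example_lock, example_lock_acquire and example_lock_release are
 * assumptions for illustration.
 */
#if 0
static volatile char example_lock;

static void example_lock_acquire(void)
{
        /* tas() returns the old value; 0 means the lock was free and is now ours. */
        while (tas(&example_lock))
                /* spin */ ;
}

static void example_lock_release(void)
{
        example_lock = 0;
}
#endif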

#ifdef CONFIG_COLDFIRE
#define sti() __asm__ __volatile__ ( \
        "move %/sr,%%d0\n\t" \
        "andi.l #0xf8ff,%%d0\n\t" \
        "move %%d0,%/sr\n" \
        : /* no outputs */ \
        : \
        : "%d0", "memory")
#define cli() __asm__ __volatile__ ( \
        "move %/sr,%%d0\n\t" \
        "ori.l  #0x0700,%%d0\n\t" \
        "move %%d0,%/sr\n" \
        : /* no outputs */ \
        : \
        : "%d0", "memory")
#else
#if defined(CONFIG_ATARI) && !defined(CONFIG_AMIGA) && !defined(CONFIG_MAC)
/* block out HSYNC on the atari */
#define sti() __asm__ __volatile__ ("andiw #0xfbff,%/sr": : : "memory")
#else /* portable version */
#define sti() __asm__ __volatile__ ("andiw #0xf8ff,%/sr": : : "memory")
#endif /* machine compilation types */
#define cli() __asm__ __volatile__ ("oriw  #0x0700,%/sr": : : "memory")
#endif

#define nop() __asm__ __volatile__ ("nop"::)
#define mb()  __asm__ __volatile__ (""   : : :"memory")

#define save_flags(x) \
__asm__ __volatile__("movew %/sr,%0":"=d" (x) : /* no input */ :"memory")

#define restore_flags(x) \
__asm__ __volatile__("movew %0,%/sr": /* no outputs */ :"d" (x) : "memory")

#define iret() __asm__ __volatile__ ("rte": : :"memory", "sp", "cc")
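
/*
 * Illustrative sketch only (not part of the original header): the usual
 * save_flags()/cli()/restore_flags() pattern for a short critical section.
 * example_counter and example_count_event() are assumptions for illustration.
 */
#if 0
static unsigned long example_counter;

static void example_count_event(void)
{
        unsigned long flags;

        save_flags(flags);      /* remember the current interrupt state */
        cli();                  /* mask interrupts for the update */
        example_counter++;
        restore_flags(flags);   /* restore whatever state was saved */
}
#endif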

#ifndef CONFIG_RMW_INSNS
/*
 * No indivisible read-modify-write instructions available: emulate the
 * exchange by disabling interrupts around a plain load and store.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
  unsigned long tmp, flags;

  save_flags(flags);
  cli();

  switch (size) {
  case 1:
    __asm__ __volatile__
    ("moveb %2,%0\n\t"
     "moveb %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
  case 2:
    __asm__ __volatile__
    ("movew %2,%0\n\t"
     "movew %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
  case 4:
    __asm__ __volatile__
    ("movel %2,%0\n\t"
     "movel %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
  }
  restore_flags(flags);
  return tmp;
}
#else
/*
 * CAS is available: loop on casb/casw/casl until the compare-and-swap
 * succeeds, giving an atomic exchange.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
            case 1:
                __asm__ __volatile__
                        ("moveb %2,%0\n\t"
                         "1:\n\t"
                         "casb %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
            case 2:
                __asm__ __volatile__
                        ("movew %2,%0\n\t"
                         "1:\n\t"
                         "casw %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
            case 4:
                __asm__ __volatile__
                        ("movel %2,%0\n\t"
                         "1:\n\t"
                         "casl %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
        }
        return x;
}
#endif
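
/*
 * Illustrative sketch only (not part of the original header): xchg()
 * atomically swapping a new value into a word and returning the old one,
 * here used to take and clear a pending-work flag.  example_pending and
 * example_take_pending() are assumptions for illustration.
 */
#if 0
static unsigned long example_pending;

static unsigned long example_take_pending(void)
{
        /* Read the old value and store 0 in one exchange. */
        return xchg(&example_pending, 0);
}
#endif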

/*
 * Board- and CPU-specific hard reset: each variant restarts the system by
 * re-entering it through its reset/boot vectors.
 */
#ifdef CONFIG_M68332
#define HARD_RESET_NOW() ({             \
        cli();                          \
        asm("                           \
        movew   #0x0000, 0xfffa6a;      \
        reset;                          \
        /*movew #0x1557, 0xfffa44;*/    \
        /*movew #0x0155, 0xfffa46;*/    \
        moveal #0, %a0;                 \
        movec %a0, %vbr;                \
        moveal 0, %sp;                  \
        moveal 4, %a0;                  \
        jmp (%a0);                      \
        ");                             \
})
#endif

#if defined( CONFIG_M68328 ) || defined( CONFIG_M68EZ328 )
#define HARD_RESET_NOW() ({             \
        cli();                          \
        asm("                           \
        moveal #0x10c00000, %a0;        \
        moveb #0, 0xFFFFF300;           \
        moveal 0(%a0), %sp;             \
        moveal 4(%a0), %a0;             \
        jmp (%a0);                      \
        ");                             \
})
#endif

#ifdef CONFIG_COLDFIRE
#if defined(CONFIG_NETtel) || defined(CONFIG_eLIA) || defined(CONFIG_MATtel)
#define HARD_RESET_NOW() ({             \
        asm("                           \
        movew #0x2700, %sr;             \
        moveal #0x10000044, %a0;        \
        movel #0xffffffff, (%a0);       \
        moveal #0x10000001, %a0;        \
        moveb #0x00, (%a0);             \
        moveal #0xf0000004, %a0;        \
        moveal (%a0), %a0;              \
        jmp (%a0);                      \
        ");                             \
})
#else
#define HARD_RESET_NOW() ({             \
        asm("                           \
        movew #0x2700, %sr;             \
        moveal #0x4, %a0;               \
        moveal (%a0), %a0;              \
        jmp (%a0);                      \
        ");                             \
})
#endif
#endif

#endif /* _M68K_SYSTEM_H */
