or1k/tags/LINUX_2_4_26_OR32/linux/linux-2.4/include/asm-sparc64/rwsem.h (rev 1276)

/* $Id: rwsem.h,v 1.1.1.1 2004-04-15 03:01:03 phoenix Exp $
 * rwsem.h: R/W semaphores implemented using CAS
 *
 * Written by David S. Miller (davem@redhat.com), 2001.
 * Derived from asm-i386/rwsem.h
 */
#ifndef _SPARC64_RWSEM_H
#define _SPARC64_RWSEM_H

#ifndef _LINUX_RWSEM_H
#error please dont include asm/rwsem.h directly, use linux/rwsem.h instead
#endif

#ifdef __KERNEL__

#include <linux/list.h>
#include <linux/spinlock.h>

struct rwsem_waiter;

extern struct rw_semaphore *FASTCALL(rwsem_down_read_failed(struct rw_semaphore *sem));
extern struct rw_semaphore *FASTCALL(rwsem_down_write_failed(struct rw_semaphore *sem));
extern struct rw_semaphore *FASTCALL(rwsem_wake(struct rw_semaphore *));

struct rw_semaphore {
        signed int count;
#define RWSEM_UNLOCKED_VALUE            0x00000000
#define RWSEM_ACTIVE_BIAS               0x00000001
#define RWSEM_ACTIVE_MASK               0x0000ffff
#define RWSEM_WAITING_BIAS              0xffff0000
#define RWSEM_ACTIVE_READ_BIAS          RWSEM_ACTIVE_BIAS
#define RWSEM_ACTIVE_WRITE_BIAS         (RWSEM_WAITING_BIAS + RWSEM_ACTIVE_BIAS)
        spinlock_t              wait_lock;
        struct list_head        wait_list;
};

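/*
 * Editorial note (not part of the original header): the 32-bit count packs
 * two fields, following the asm-i386 scheme this file is derived from.  The
 * low 16 bits (RWSEM_ACTIVE_MASK) count active holders; the high 16 bits
 * carry the negative biases added for writers and queued waiters.  A few
 * example values, read as signed 32-bit integers:
 *
 *      0x00000000  unlocked (RWSEM_UNLOCKED_VALUE)
 *      0x00000001  one reader holds the lock
 *      0x00000003  three readers hold the lock
 *      0xffff0001  one writer holds the lock (negative as a signed int)
 *
 * The fast paths below inspect the CAS result (its sign, whether it is zero,
 * or its RWSEM_ACTIVE_MASK bits) to decide whether to branch to the
 * out-of-line slow path.
 */
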
#define __RWSEM_INITIALIZER(name) \
{ RWSEM_UNLOCKED_VALUE, SPIN_LOCK_UNLOCKED, LIST_HEAD_INIT((name).wait_list) }

#define DECLARE_RWSEM(name) \
        struct rw_semaphore name = __RWSEM_INITIALIZER(name)

static inline void init_rwsem(struct rw_semaphore *sem)
{
        sem->count = RWSEM_UNLOCKED_VALUE;
        spin_lock_init(&sem->wait_lock);
        INIT_LIST_HEAD(&sem->wait_list);
}

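/*
 * Illustrative usage sketch (editorial addition, not part of the original
 * file).  Kernel code does not call the __down_ and __up_ primitives below
 * directly; it uses the generic wrappers declared in <linux/rwsem.h>.
 * 'example_sem' and example() are hypothetical names used only for the
 * sketch.
 */
#if 0   /* example only, not compiled */
DECLARE_RWSEM(example_sem);                     /* statically initialised */

static void example(void)
{
        down_read(&example_sem);                /* shared (reader) lock */
        /* ... read-side critical section ... */
        up_read(&example_sem);

        down_write(&example_sem);               /* exclusive (writer) lock */
        /* ... write-side critical section ... */
        up_write(&example_sem);
}
#endif
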
static inline void __down_read(struct rw_semaphore *sem)
{
        __asm__ __volatile__(
                "! beginning __down_read\n"
                "1:\tlduw       [%0], %%g5\n\t"
                "add            %%g5, 1, %%g7\n\t"
                "cas            [%0], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " add           %%g7, 1, %%g7\n\t"
                "cmp            %%g7, 0\n\t"
                "bl,pn          %%icc, 3f\n\t"
                " membar        #StoreLoad | #StoreStore\n"
                "2:\n\t"
                ".subsection    2\n"
                "3:\tmov        %0, %%g5\n\t"
                "save           %%sp, -160, %%sp\n\t"
                "mov            %%g1, %%l1\n\t"
                "mov            %%g2, %%l2\n\t"
                "mov            %%g3, %%l3\n\t"
                "call           %1\n\t"
                " mov           %%g5, %%o0\n\t"
                "mov            %%l1, %%g1\n\t"
                "mov            %%l2, %%g2\n\t"
                "ba,pt          %%xcc, 2b\n\t"
                " restore       %%l3, %%g0, %%g3\n\t"
                ".previous\n\t"
                "! ending __down_read"
                : : "r" (sem), "i" (rwsem_down_read_failed)
                : "g5", "g7", "memory", "cc");
}

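/*
 * Editorial sketch (not part of the original file): the inline assembly in
 * __down_read() above is roughly equivalent to the following C, assuming a
 * hypothetical cas() helper with SPARC-V9 semantics (compare *ptr with
 * 'old'; if equal, store 'new'; either way return the value that was in
 * memory).  Memory barriers are omitted from the model.
 */
#if 0   /* illustrative model only, not compiled */
static void __down_read_model(struct rw_semaphore *sem)
{
        int old, seen;

        do {
                old  = sem->count;
                seen = cas(&sem->count, old, old + 1);  /* try count++ */
        } while (seen != old);                          /* lost a race, retry */

        if (old + 1 < 0)                        /* new count negative: a      */
                rwsem_down_read_failed(sem);    /* writer is active or queued */
}
#endif
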
static __inline__ int __down_read_trylock(struct rw_semaphore *sem)
{
        int result;

        __asm__ __volatile__(
                "! beginning __down_read_trylock\n"
                "1:\tlduw       [%1], %%g5\n\t"
                "add            %%g5, 1, %%g7\n\t"
                "cmp            %%g7, 0\n\t"
                "bl,pn          %%icc, 2f\n\t"
                " mov           0, %0\n\t"
                "cas            [%1], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " mov           1, %0\n\t"
                "membar         #StoreLoad | #StoreStore\n"
                "2:\n\t"
                "! ending __down_read_trylock"
                : "=&r" (result)
                : "r" (sem)
                : "g5", "g7", "memory", "cc");

        return result;
}

static inline void __down_write(struct rw_semaphore *sem)
{
        __asm__ __volatile__(
                "! beginning __down_write\n\t"
                "sethi          %%hi(%2), %%g1\n\t"
                "or             %%g1, %%lo(%2), %%g1\n"
                "1:\tlduw       [%0], %%g5\n\t"
                "add            %%g5, %%g1, %%g7\n\t"
                "cas            [%0], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " cmp           %%g7, 0\n\t"
                "bne,pn         %%icc, 3f\n\t"
                " membar        #StoreLoad | #StoreStore\n"
                "2:\n\t"
                ".subsection    2\n"
                "3:\tmov        %0, %%g5\n\t"
                "save           %%sp, -160, %%sp\n\t"
                "mov            %%g2, %%l2\n\t"
                "mov            %%g3, %%l3\n\t"
                "call           %1\n\t"
                " mov           %%g5, %%o0\n\t"
                "mov            %%l2, %%g2\n\t"
                "ba,pt          %%xcc, 2b\n\t"
                " restore       %%l3, %%g0, %%g3\n\t"
                ".previous\n\t"
                "! ending __down_write"
                : : "r" (sem), "i" (rwsem_down_write_failed),
                    "i" (RWSEM_ACTIVE_WRITE_BIAS)
                : "g1", "g5", "g7", "memory", "cc");
}

static __inline__ int __down_write_trylock(struct rw_semaphore *sem)
{
        int result;

        __asm__ __volatile__(
                "! beginning __down_write_trylock\n\t"
                "sethi          %%hi(%2), %%g1\n\t"
                "or             %%g1, %%lo(%2), %%g1\n"
                "1:\tlduw       [%1], %%g5\n\t"
                "cmp            %%g5, 0\n\t"
                "bne,pn         %%icc, 2f\n\t"
                " mov           0, %0\n\t"
                "add            %%g5, %%g1, %%g7\n\t"
                "cas            [%1], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " mov           1, %0\n\t"
                "membar         #StoreLoad | #StoreStore\n"
                "2:\n\t"
                "! ending __down_write_trylock"
                : "=&r" (result)
                : "r" (sem), "i" (RWSEM_ACTIVE_WRITE_BIAS)
                : "g1", "g5", "g7", "memory", "cc");

        return result;
}

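/*
 * Editorial sketch (not part of the original file): __down_write_trylock()
 * above, modelled in C with the same hypothetical cas() helper as in the
 * __down_read sketch.  It can only succeed while the semaphore is completely
 * idle (count == 0).
 */
#if 0   /* illustrative model only, not compiled */
static int __down_write_trylock_model(struct rw_semaphore *sem)
{
        int old, seen;

        for (;;) {
                old = sem->count;
                if (old != 0)
                        return 0;               /* already held or contended */
                seen = cas(&sem->count, old, old + RWSEM_ACTIVE_WRITE_BIAS);
                if (seen == old)
                        return 1;               /* acquired for write */
                /* the count changed under us; re-examine it */
        }
}
#endif
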
static inline void __up_read(struct rw_semaphore *sem)
{
        __asm__ __volatile__(
                "! beginning __up_read\n\t"
                "1:\tlduw       [%0], %%g5\n\t"
                "sub            %%g5, 1, %%g7\n\t"
                "cas            [%0], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " cmp           %%g7, 0\n\t"
                "bl,pn          %%icc, 3f\n\t"
                " membar        #StoreLoad | #StoreStore\n"
                "2:\n\t"
                ".subsection    2\n"
                "3:\tsethi      %%hi(%2), %%g1\n\t"
                "sub            %%g7, 1, %%g7\n\t"
                "or             %%g1, %%lo(%2), %%g1\n\t"
                "andcc          %%g7, %%g1, %%g0\n\t"
                "bne,pn         %%icc, 2b\n\t"
                " mov           %0, %%g5\n\t"
                "save           %%sp, -160, %%sp\n\t"
                "mov            %%g2, %%l2\n\t"
                "mov            %%g3, %%l3\n\t"
                "call           %1\n\t"
                " mov           %%g5, %%o0\n\t"
                "mov            %%l2, %%g2\n\t"
                "ba,pt          %%xcc, 2b\n\t"
                " restore       %%l3, %%g0, %%g3\n\t"
                ".previous\n\t"
                "! ending __up_read"
                : : "r" (sem), "i" (rwsem_wake),
                    "i" (RWSEM_ACTIVE_MASK)
                : "g1", "g5", "g7", "memory", "cc");
}

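/*
 * Editorial sketch (not part of the original file): the slow path of
 * __up_read() above only calls rwsem_wake() when the waiting bias is set
 * and this was the last active holder.  In rough C, using the same
 * hypothetical cas() helper:
 */
#if 0   /* illustrative model only, not compiled */
static void __up_read_model(struct rw_semaphore *sem)
{
        int old, seen;

        do {
                old  = sem->count;
                seen = cas(&sem->count, old, old - 1);  /* count-- */
        } while (seen != old);

        if (old < 0 && ((old - 1) & RWSEM_ACTIVE_MASK) == 0)
                rwsem_wake(sem);        /* waiters queued, no active holders */
}
#endif
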
static inline void __up_write(struct rw_semaphore *sem)
{
        __asm__ __volatile__(
                "! beginning __up_write\n\t"
                "sethi          %%hi(%2), %%g1\n\t"
                "or             %%g1, %%lo(%2), %%g1\n"
                "1:\tlduw       [%0], %%g5\n\t"
                "sub            %%g5, %%g1, %%g7\n\t"
                "cas            [%0], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " sub           %%g7, %%g1, %%g7\n\t"
                "cmp            %%g7, 0\n\t"
                "bl,pn          %%icc, 3f\n\t"
                " membar        #StoreLoad | #StoreStore\n"
                "2:\n\t"
                ".subsection 2\n"
                "3:\tmov        %0, %%g5\n\t"
                "save           %%sp, -160, %%sp\n\t"
                "mov            %%g2, %%l2\n\t"
                "mov            %%g3, %%l3\n\t"
                "call           %1\n\t"
                " mov           %%g5, %%o0\n\t"
                "mov            %%l2, %%g2\n\t"
                "ba,pt          %%xcc, 2b\n\t"
                " restore       %%l3, %%g0, %%g3\n\t"
                ".previous\n\t"
                "! ending __up_write"
                : : "r" (sem), "i" (rwsem_wake),
                    "i" (RWSEM_ACTIVE_WRITE_BIAS)
                : "g1", "g5", "g7", "memory", "cc");
}

static inline int rwsem_atomic_update(int delta, struct rw_semaphore *sem)
{
        int tmp = delta;

        __asm__ __volatile__(
                "1:\tlduw       [%2], %%g5\n\t"
                "add            %%g5, %1, %%g7\n\t"
                "cas            [%2], %%g5, %%g7\n\t"
                "cmp            %%g5, %%g7\n\t"
                "bne,pn         %%icc, 1b\n\t"
                " membar        #StoreLoad | #StoreStore\n\t"
                "mov            %%g7, %0\n\t"
                : "=&r" (tmp)
                : "0" (tmp), "r" (sem)
                : "g5", "g7", "memory");

        return tmp + delta;
}

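/*
 * Editorial note (not part of the original file): rwsem_atomic_update()
 * atomically adds 'delta' to sem->count and returns the updated value.  The
 * assembly leaves the pre-update count in 'tmp', and the C return statement
 * adds 'delta' to it, so the effect is:
 *
 *      old = fetch_and_add(&sem->count, delta);
 *      return old + delta;
 *
 * rwsem_atomic_add() below is the same operation with the result discarded.
 */
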
#define rwsem_atomic_add rwsem_atomic_update

static inline __u16 rwsem_cmpxchgw(struct rw_semaphore *sem, __u16 __old, __u16 __new)
{
        u32 old = (sem->count & 0xffff0000) | (u32) __old;
        u32 new = (old & 0xffff0000) | (u32) __new;
        u32 prev;

again:
        __asm__ __volatile__("cas       [%2], %3, %0\n\t"
                             "membar    #StoreLoad | #StoreStore"
                             : "=&r" (prev)
                             : "0" (new), "r" (sem), "r" (old)
                             : "memory");

        /* To give the same semantics as x86 cmpxchgw, keep trying
         * if only the upper 16-bits changed.
         */
        if (prev != old &&
            ((prev & 0xffff) == (old & 0xffff)))
                goto again;

        return prev & 0xffff;
}

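/*
 * Editorial note (not part of the original file): sparc64 has no 16-bit
 * compare-and-swap, so rwsem_cmpxchgw() above emulates x86's cmpxchgw on the
 * low half of the count with a full 32-bit cas.  The upper 16 bits are
 * sampled once from sem->count and carried along unchanged, and, as the
 * in-line comment explains, a cas failure caused purely by movement in those
 * upper bits is retried instead of being reported to the caller.
 */
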
static inline signed long rwsem_cmpxchg(struct rw_semaphore *sem, signed long old, signed long new)
{
        return cmpxchg(&sem->count,old,new);
}

#endif /* __KERNEL__ */

#endif /* _SPARC64_RWSEM_H */
