#ifndef _SPARC_SEMAPHORE_H
#define _SPARC_SEMAPHORE_H

/* Dinky, good for nothing, just barely irq safe, Sparc semaphores. */

#ifdef __KERNEL__

#include <asm/atomic.h>
#include <linux/wait.h>
#include <linux/rwsem.h>

struct semaphore {
        atomic24_t count;
        int sleepers;
        wait_queue_head_t wait;
};

#define __SEMAPHORE_INITIALIZER(name, n) \
{ \
        .count = ATOMIC24_INIT(n), \
        .sleepers = 0, \
        .wait = __WAIT_QUEUE_HEAD_INITIALIZER((name).wait) \
}

#define __DECLARE_SEMAPHORE_GENERIC(name,count) \
        struct semaphore name = __SEMAPHORE_INITIALIZER(name,count)

#define DECLARE_MUTEX(name) __DECLARE_SEMAPHORE_GENERIC(name,1)
#define DECLARE_MUTEX_LOCKED(name) __DECLARE_SEMAPHORE_GENERIC(name,0)

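/*
 * Illustrative usage sketch (caller-side code, not defined in this header;
 * the identifiers my_sem and example_func are hypothetical):
 *
 *      static DECLARE_MUTEX(my_sem);           // count starts at 1
 *
 *      static void example_func(void)
 *      {
 *              down(&my_sem);                  // may sleep
 *              // ... critical section ...
 *              up(&my_sem);
 *      }
 *
 * A semaphore embedded in another structure would instead be set up at
 * run time with sema_init(), e.g. sema_init(&obj->sem, 1).
 */
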
static inline void sema_init (struct semaphore *sem, int val)
{
        atomic24_set(&sem->count, val);
        sem->sleepers = 0;
        init_waitqueue_head(&sem->wait);
}

static inline void init_MUTEX (struct semaphore *sem)
{
        sema_init(sem, 1);
}

static inline void init_MUTEX_LOCKED (struct semaphore *sem)
{
        sema_init(sem, 0);
}

extern void __down(struct semaphore * sem);
extern int __down_interruptible(struct semaphore * sem);
extern int __down_trylock(struct semaphore * sem);
extern void __up(struct semaphore * sem);

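/*
 * The primitives below share one pattern: an inline fast path that
 * atomically adjusts sem->count through the out-of-line
 * ___atomic24_sub/___atomic24_add helpers (pointer in %g1, delta in and
 * new value back in %g2), followed by a test of the new count.  The
 * contended case branches to a stub emitted into .subsection 2, which
 * opens a register window, preserves the scratch globals and calls the
 * matching C slow path declared above (__down, __down_interruptible,
 * __down_trylock or __up) before resuming at the fast path.
 */
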
static inline void down(struct semaphore * sem)
{
        register volatile int *ptr asm("g1");
        register int increment asm("g2");

        might_sleep();

        ptr = &(sem->count.counter);
        increment = 1;

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___atomic24_sub\n\t"
        " add %%o7, 8, %%o7\n\t"
        "tst %%g2\n\t"
        "bl 2f\n\t"
        " nop\n"
        "1:\n\t"
        ".subsection 2\n"
        "2:\n\t"
        "save %%sp, -64, %%sp\n\t"
        "mov %%g1, %%l1\n\t"
        "mov %%g5, %%l5\n\t"
        "call %3\n\t"
        " mov %%g1, %%o0\n\t"
        "mov %%l1, %%g1\n\t"
        "ba 1b\n\t"
        " restore %%l5, %%g0, %%g5\n\t"
        ".previous\n"
        : "=&r" (increment)
        : "0" (increment), "r" (ptr), "i" (__down)
        : "g3", "g4", "g7", "memory", "cc");
}

static inline int down_interruptible(struct semaphore * sem)
{
        register volatile int *ptr asm("g1");
        register int increment asm("g2");

        might_sleep();

        ptr = &(sem->count.counter);
        increment = 1;

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___atomic24_sub\n\t"
        " add %%o7, 8, %%o7\n\t"
        "tst %%g2\n\t"
        "bl 2f\n\t"
        " clr %%g2\n"
        "1:\n\t"
        ".subsection 2\n"
        "2:\n\t"
        "save %%sp, -64, %%sp\n\t"
        "mov %%g1, %%l1\n\t"
        "mov %%g5, %%l5\n\t"
        "call %3\n\t"
        " mov %%g1, %%o0\n\t"
        "mov %%l1, %%g1\n\t"
        "mov %%l5, %%g5\n\t"
        "ba 1b\n\t"
        " restore %%o0, %%g0, %%g2\n\t"
        ".previous\n"
        : "=&r" (increment)
        : "0" (increment), "r" (ptr), "i" (__down_interruptible)
        : "g3", "g4", "g7", "memory", "cc");

        return increment;
}

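/*
 * down_interruptible() returns 0 once the semaphore has been acquired and
 * a non-zero value if the sleep was broken by a signal.  A typical
 * (illustrative) caller pattern, with my_sem hypothetical:
 *
 *      if (down_interruptible(&my_sem))
 *              return -ERESTARTSYS;
 *      // ... critical section ...
 *      up(&my_sem);
 */
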
static inline int down_trylock(struct semaphore * sem)
{
        register volatile int *ptr asm("g1");
        register int increment asm("g2");

        ptr = &(sem->count.counter);
        increment = 1;

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___atomic24_sub\n\t"
        " add %%o7, 8, %%o7\n\t"
        "tst %%g2\n\t"
        "bl 2f\n\t"
        " clr %%g2\n"
        "1:\n\t"
        ".subsection 2\n"
        "2:\n\t"
        "save %%sp, -64, %%sp\n\t"
        "mov %%g1, %%l1\n\t"
        "mov %%g5, %%l5\n\t"
        "call %3\n\t"
        " mov %%g1, %%o0\n\t"
        "mov %%l1, %%g1\n\t"
        "mov %%l5, %%g5\n\t"
        "ba 1b\n\t"
        " restore %%o0, %%g0, %%g2\n\t"
        ".previous\n"
        : "=&r" (increment)
        : "0" (increment), "r" (ptr), "i" (__down_trylock)
        : "g3", "g4", "g7", "memory", "cc");

        return increment;
}

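/*
 * down_trylock() never sleeps: it returns 0 when the semaphore was
 * acquired and non-zero when it could not be taken immediately, so it may
 * be used from contexts that cannot block.  Illustrative caller pattern
 * (my_sem is hypothetical):
 *
 *      if (down_trylock(&my_sem))
 *              return;                         // busy, try again later
 *      // ... critical section ...
 *      up(&my_sem);
 */
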
static inline void up(struct semaphore * sem)
{
        register volatile int *ptr asm("g1");
        register int increment asm("g2");

        ptr = &(sem->count.counter);
        increment = 1;

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___atomic24_add\n\t"
        " add %%o7, 8, %%o7\n\t"
        "tst %%g2\n\t"
        "ble 2f\n\t"
        " nop\n"
        "1:\n\t"
        ".subsection 2\n"
        "2:\n\t"
        "save %%sp, -64, %%sp\n\t"
        "mov %%g1, %%l1\n\t"
        "mov %%g5, %%l5\n\t"
        "call %3\n\t"
        " mov %%g1, %%o0\n\t"
        "mov %%l1, %%g1\n\t"
        "ba 1b\n\t"
        " restore %%l5, %%g0, %%g5\n\t"
        ".previous\n"
        : "=&r" (increment)
        : "0" (increment), "r" (ptr), "i" (__up)
        : "g3", "g4", "g7", "memory", "cc");
}

#endif /* __KERNEL__ */

#endif /* !(_SPARC_SEMAPHORE_H) */