]> git.proxmox.com Git - mirror_edk2.git/blame - StdLib/Include/Arm/machine/lock.h
* StdLib.dsc was changed to always build the sockets code.
[mirror_edk2.git] / StdLib / Include / Arm / machine / lock.h
CommitLineData
2aa62f2b 1/* $NetBSD: lock.h,v 1.7 2005/12/28 19:09:29 perry Exp $ */\r
2\r
3/*-\r
4 * Copyright (c) 2000, 2001 The NetBSD Foundation, Inc.\r
5 * All rights reserved.\r
6 *\r
7 * This code is derived from software contributed to The NetBSD Foundation\r
8 * by Jason R. Thorpe.\r
9 *\r
10 * Redistribution and use in source and binary forms, with or without\r
11 * modification, are permitted provided that the following conditions\r
12 * are met:\r
13 * 1. Redistributions of source code must retain the above copyright\r
14 * notice, this list of conditions and the following disclaimer.\r
15 * 2. Redistributions in binary form must reproduce the above copyright\r
16 * notice, this list of conditions and the following disclaimer in the\r
17 * documentation and/or other materials provided with the distribution.\r
18 * 3. All advertising materials mentioning features or use of this software\r
19 * must display the following acknowledgement:\r
20 * This product includes software developed by the NetBSD\r
21 * Foundation, Inc. and its contributors.\r
22 * 4. Neither the name of The NetBSD Foundation nor the names of its\r
23 * contributors may be used to endorse or promote products derived\r
24 * from this software without specific prior written permission.\r
25 *\r
26 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS\r
27 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\r
28 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\r
29 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS\r
30 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r
31 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r
32 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\r
33 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\r
34 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\r
35 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\r
36 * POSSIBILITY OF SUCH DAMAGE.\r
37 */\r
38\r
39/*\r
40 * Machine-dependent spin lock operations.\r
41 *\r
42 * NOTE: The SWP insn used here is available only on ARM architecture\r
43 * version 3 and later (as well as 2a). What we are going to do is\r
44 * expect that the kernel will trap and emulate the insn. That will\r
45 * be slow, but give us the atomicity that we need.\r
46 */\r
47\r
48#ifndef _ARM_LOCK_H_\r
49#define _ARM_LOCK_H_\r
50\r
static __inline int
__swp(int __val, volatile int *__ptr)
{

	/*
	 * Atomically exchange __val with *__ptr and return the previous
	 * contents of *__ptr (the kernel traps and emulates SWP on cores
	 * that lack it -- see the file comment above).
	 *
	 * The output constraint must be "=&r" (earlyclobber): without the
	 * "&", the compiler is free to allocate the result register so it
	 * overlaps an input, and SWP Rd, Rm, [Rn] is UNPREDICTABLE when
	 * Rd == Rn (the destination aliasing the address register).
	 */
	__asm volatile("swp %0, %1, [%2]"
	    : "=&r" (__val) : "r" (__val), "r" (__ptr) : "memory");
	return __val;
}
59\r
60static __inline void __attribute__((__unused__))\r
61__cpu_simple_lock_init(__cpu_simple_lock_t *alp)\r
62{\r
63\r
64 *alp = __SIMPLELOCK_UNLOCKED;\r
65}\r
66\r
67static __inline void __attribute__((__unused__))\r
68__cpu_simple_lock(__cpu_simple_lock_t *alp)\r
69{\r
70\r
71 while (__swp(__SIMPLELOCK_LOCKED, alp) != __SIMPLELOCK_UNLOCKED)\r
72 continue;\r
73}\r
74\r
75static __inline int __attribute__((__unused__))\r
76__cpu_simple_lock_try(__cpu_simple_lock_t *alp)\r
77{\r
78\r
79 return (__swp(__SIMPLELOCK_LOCKED, alp) == __SIMPLELOCK_UNLOCKED);\r
80}\r
81\r
82static __inline void __attribute__((__unused__))\r
83__cpu_simple_unlock(__cpu_simple_lock_t *alp)\r
84{\r
85\r
86 *alp = __SIMPLELOCK_UNLOCKED;\r
87}\r
88\r
89#endif /* _ARM_LOCK_H_ */\r