/*
 * This file is part of the SPL: Solaris Porting Layer.
 *
 * Copyright (c) 2008 Lawrence Livermore National Security, LLC.
 * Produced at Lawrence Livermore National Laboratory
 * Written by:
 *         Brian Behlendorf <behlendorf1@llnl.gov>,
 *         Herb Wartens <wartens2@llnl.gov>,
 *         Jim Garlick <garlick@llnl.gov>
 * UCRL-CODE-235197
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#ifndef _SPL_ATOMIC_H
#define _SPL_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

#include <linux/module.h>
#include <linux/spinlock.h>

/* XXX: Serialize everything through global locks.  This is
 * going to be bad for performance, but for now it's the easiest
 * way to ensure correct behavior.  I don't like it at all.
 * It would be nicer to map these functions directly onto the
 * Linux atomic operations, but the uint64_t type complicates
 * this.
 */
extern spinlock_t atomic64_lock;
extern spinlock_t atomic32_lock;

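/*
 * Illustrative usage (not part of the original header): because all of
 * the operations below serialize on atomic64_lock/atomic32_lock, a
 * caller can update a shared counter without any locking of its own.
 * The refcnt variable and cleanup() helper here are hypothetical.
 *
 *     static volatile uint64_t refcnt = 0;
 *
 *     atomic_inc_64(&refcnt);                  // take a reference
 *     if (atomic_sub_64_nv(&refcnt, 1) == 0)   // drop it; 0 means last user
 *             cleanup();
 */
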
/* Add delta to *target and return the previous value of *target. */
static __inline__ uint32_t
atomic_add_32(volatile uint32_t *target, int32_t delta)
{
        uint32_t rc;

        spin_lock(&atomic32_lock);
        rc = *target;
        *target += delta;
        spin_unlock(&atomic32_lock);

        return rc;
}

/* Atomically increment *target. */
static __inline__ void
atomic_inc_64(volatile uint64_t *target)
{
        spin_lock(&atomic64_lock);
        (*target)++;
        spin_unlock(&atomic64_lock);
}

/* Atomically decrement *target. */
static __inline__ void
atomic_dec_64(volatile uint64_t *target)
{
        spin_lock(&atomic64_lock);
        (*target)--;
        spin_unlock(&atomic64_lock);
}

/* Add delta to *target and return the previous value. */
static __inline__ uint64_t
atomic_add_64(volatile uint64_t *target, uint64_t delta)
{
        uint64_t rc;

        spin_lock(&atomic64_lock);
        rc = *target;
        *target += delta;
        spin_unlock(&atomic64_lock);

        return rc;
}

/* Subtract delta from *target and return the previous value. */
static __inline__ uint64_t
atomic_sub_64(volatile uint64_t *target, uint64_t delta)
{
        uint64_t rc;

        spin_lock(&atomic64_lock);
        rc = *target;
        *target -= delta;
        spin_unlock(&atomic64_lock);

        return rc;
}

/* Add delta to *target and return the resulting (new) value.  The new
 * value is captured while the lock is still held, so a concurrent
 * update cannot slip in between the addition and the return. */
static __inline__ uint64_t
atomic_add_64_nv(volatile uint64_t *target, uint64_t delta)
{
        uint64_t newval;

        spin_lock(&atomic64_lock);
        newval = (*target += delta);
        spin_unlock(&atomic64_lock);

        return newval;
}

/* Subtract delta from *target and return the resulting (new) value. */
static __inline__ uint64_t
atomic_sub_64_nv(volatile uint64_t *target, uint64_t delta)
{
        uint64_t newval;

        spin_lock(&atomic64_lock);
        newval = (*target -= delta);
        spin_unlock(&atomic64_lock);

        return newval;
}

/* Compare-and-swap: if *target equals cmp, store newval in *target.
 * The previous value of *target is always returned, so callers can
 * detect success by comparing the return value against cmp. */
static __inline__ uint64_t
atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
    uint64_t newval)
{
        uint64_t rc;

        spin_lock(&atomic64_lock);
        rc = *target;
        if (*target == cmp)
                *target = newval;
        spin_unlock(&atomic64_lock);

        return rc;
}

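/*
 * Illustrative compare-and-swap loop (not part of the original header):
 * since atomic_cas_64() always returns the value it observed, a caller
 * retries until that observed value matches what it expected, which
 * means its newval was actually stored.  The counter pointer below is
 * hypothetical.
 *
 *     volatile uint64_t *counter;
 *     uint64_t oldval, newval;
 *
 *     do {
 *             oldval = *counter;
 *             newval = oldval + 1;
 *     } while (atomic_cas_64(counter, oldval, newval) != oldval);
 */
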
#if defined(__x86_64__)
/* XXX: Implement atomic_cas_ptr() in terms of uint64_t's.  This is
 * of course only safe and correct on 64-bit arches... but for now
 * I'm OK with that.
 */
static __inline__ void *
atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
{
        return (void *)atomic_cas_64((volatile uint64_t *)target,
            (uint64_t)cmp, (uint64_t)newval);
}
#endif
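
/*
 * Illustrative only (not part of the original header): on a 32-bit arch
 * the same trick would have to go through uintptr_t and a 32-bit CAS,
 * roughly like the sketch below.  atomic_cas_32() is hypothetical; this
 * header does not provide it.
 *
 *     static __inline__ void *
 *     atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
 *     {
 *             return (void *)(uintptr_t)atomic_cas_32(
 *                 (volatile uint32_t *)target,
 *                 (uint32_t)(uintptr_t)cmp,
 *                 (uint32_t)(uintptr_t)newval);
 *     }
 */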

#ifdef __cplusplus
}
#endif

#endif /* _SPL_ATOMIC_H */