]>
Commit | Line | Data |
---|---|---|
e306af50 TL |
1 | // -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*- |
2 | // vim: ts=8 sw=2 smarttab | |
3 | ||
4 | #include "HybridAllocator.h" | |
5 | ||
6 | #include <limits> | |
7 | ||
8 | #include "common/config_proxy.h" | |
9 | #include "common/debug.h" | |
10 | ||
11 | #define dout_context cct | |
12 | #define dout_subsys ceph_subsys_bluestore | |
13 | #undef dout_prefix | |
14 | #define dout_prefix *_dout << "HybridAllocator " | |
15 | ||
16 | ||
17 | int64_t HybridAllocator::allocate( | |
18 | uint64_t want, | |
19 | uint64_t unit, | |
20 | uint64_t max_alloc_size, | |
21 | int64_t hint, | |
22 | PExtentVector* extents) | |
23 | { | |
24 | ldout(cct, 10) << __func__ << std::hex | |
25 | << " want 0x" << want | |
26 | << " unit 0x" << unit | |
27 | << " max_alloc_size 0x" << max_alloc_size | |
28 | << " hint 0x" << hint | |
29 | << std::dec << dendl; | |
30 | ceph_assert(isp2(unit)); | |
31 | ceph_assert(want % unit == 0); | |
32 | ||
33 | if (max_alloc_size == 0) { | |
34 | max_alloc_size = want; | |
35 | } | |
36 | if (constexpr auto cap = std::numeric_limits<decltype(bluestore_pextent_t::length)>::max(); | |
37 | max_alloc_size >= cap) { | |
38 | max_alloc_size = p2align(uint64_t(cap), (uint64_t)get_block_size()); | |
39 | } | |
40 | ||
41 | std::lock_guard l(lock); | |
42 | ||
43 | int64_t res; | |
44 | PExtentVector local_extents; | |
45 | ||
46 | // preserve original 'extents' vector state | |
47 | auto orig_size = extents->size(); | |
48 | auto orig_pos = extents->end(); | |
49 | if (orig_size) { | |
50 | --orig_pos; | |
51 | } | |
52 | ||
53 | // try bitmap first to avoid unneeded contiguous extents split if | |
54 | // desired amount is less than shortes range in AVL | |
55 | if (bmap_alloc && bmap_alloc->get_free() && | |
56 | want < _lowest_size_available()) { | |
57 | res = bmap_alloc->allocate(want, unit, max_alloc_size, hint, extents); | |
58 | if (res < 0) { | |
59 | // got a failure, release already allocated and | |
60 | // start over allocation from avl | |
61 | if (orig_size) { | |
62 | local_extents.insert( | |
63 | local_extents.end(), ++orig_pos, extents->end()); | |
64 | extents->resize(orig_size); | |
65 | } else { | |
66 | extents->swap(local_extents); | |
67 | } | |
68 | bmap_alloc->release(local_extents); | |
69 | res = 0; | |
70 | } | |
71 | if ((uint64_t)res < want) { | |
72 | auto res2 = _allocate(want - res, unit, max_alloc_size, hint, extents); | |
73 | if (res2 < 0) { | |
74 | res = res2; // caller to do the release | |
75 | } else { | |
76 | res += res2; | |
77 | } | |
78 | } | |
79 | } else { | |
80 | res = _allocate(want, unit, max_alloc_size, hint, extents); | |
81 | if (res < 0) { | |
82 | // got a failure, release already allocated and | |
83 | // start over allocation from bitmap | |
84 | if (orig_size) { | |
85 | local_extents.insert( | |
86 | local_extents.end(), ++orig_pos, extents->end()); | |
87 | extents->resize(orig_size); | |
88 | } else { | |
89 | extents->swap(local_extents); | |
90 | } | |
91 | _release(local_extents); | |
92 | res = 0; | |
93 | } | |
94 | if ((uint64_t)res < want ) { | |
95 | auto res2 = bmap_alloc ? | |
96 | bmap_alloc->allocate(want - res, unit, max_alloc_size, hint, extents) : | |
97 | 0; | |
98 | if (res2 < 0 ) { | |
99 | res = res2; // caller to do the release | |
100 | } else { | |
101 | res += res2; | |
102 | } | |
103 | } | |
104 | } | |
105 | return res ? res : -ENOSPC; | |
106 | } | |
107 | ||
// Return the given set of ranges to the free pool under the allocator lock.
void HybridAllocator::release(const interval_set<uint64_t>& release_set) {
  std::lock_guard l(lock);
  // this will attempt to put free ranges into AvlAllocator first and
  // fallback to bitmap one via _try_insert_range call
  _release(release_set);
}
114 | ||
115 | uint64_t HybridAllocator::get_free() | |
116 | { | |
117 | std::lock_guard l(lock); | |
118 | return (bmap_alloc ? bmap_alloc->get_free() : 0) + _get_free(); | |
119 | } | |
120 | ||
121 | double HybridAllocator::get_fragmentation() | |
122 | { | |
123 | std::lock_guard l(lock); | |
124 | auto f = AvlAllocator::_get_fragmentation(); | |
125 | auto bmap_free = bmap_alloc ? bmap_alloc->get_free() : 0; | |
126 | if (bmap_free) { | |
127 | auto _free = _get_free() + bmap_free; | |
128 | auto bf = bmap_alloc->get_fragmentation(); | |
129 | ||
130 | f = f * _get_free() / _free + bf * bmap_free / _free; | |
131 | } | |
132 | return f; | |
133 | } | |
134 | ||
135 | void HybridAllocator::dump() | |
136 | { | |
137 | std::lock_guard l(lock); | |
138 | AvlAllocator::_dump(); | |
139 | if (bmap_alloc) { | |
140 | bmap_alloc->dump(); | |
141 | } | |
142 | ldout(cct, 0) << __func__ | |
143 | << " avl_free: " << _get_free() | |
144 | << " bmap_free: " << (bmap_alloc ? bmap_alloc->get_free() : 0) | |
145 | << dendl; | |
146 | } | |
147 | ||
// Enumerate every free extent from both sub-allocators through 'notify'.
// NOTE(review): unlike the other public methods, 'lock' is not taken here -
// presumably AvlAllocator::dump(notify) locks internally or callers
// serialize access; confirm before relying on concurrent use.
void HybridAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
{
  AvlAllocator::dump(notify);
  if (bmap_alloc) {
    bmap_alloc->dump(notify);
  }
}
155 | ||
156 | void HybridAllocator::init_rm_free(uint64_t offset, uint64_t length) | |
157 | { | |
158 | std::lock_guard l(lock); | |
159 | ldout(cct, 10) << __func__ << std::hex | |
160 | << " offset 0x" << offset | |
161 | << " length 0x" << length | |
162 | << std::dec << dendl; | |
163 | _try_remove_from_tree(offset, length, | |
164 | [&](uint64_t o, uint64_t l, bool found) { | |
165 | if (!found) { | |
166 | if (bmap_alloc) { | |
167 | bmap_alloc->init_rm_free(o, l); | |
168 | } else { | |
169 | lderr(cct) << "init_rm_free lambda" << std::hex | |
170 | << "Uexpected extent: " | |
171 | << " 0x" << o << "~" << l | |
172 | << std::dec << dendl; | |
173 | ceph_assert(false); | |
174 | } | |
175 | } | |
176 | }); | |
177 | } | |
178 | ||
179 | void HybridAllocator::shutdown() | |
180 | { | |
181 | std::lock_guard l(lock); | |
182 | _shutdown(); | |
183 | if (bmap_alloc) { | |
184 | bmap_alloc->shutdown(); | |
185 | delete bmap_alloc; | |
186 | bmap_alloc = nullptr; | |
187 | } | |
188 | } | |
189 | ||
// Hand the free range [start, end) over to the fallback bitmap allocator,
// constructing that allocator lazily on first spillover.  Invoked when the
// AVL tree cannot keep the range itself.
// NOTE(review): _-prefixed helper - presumably called with 'lock' already
// held by the caller; confirm against _try_insert_range call sites.
void HybridAllocator::_spillover_range(uint64_t start, uint64_t end)
{
  auto size = end - start;
  dout(20) << __func__
           << std::hex << " "
           << start << "~" << size
           << std::dec
           << dendl;
  ceph_assert(size);  // zero-length spillover would indicate a caller bug
  if (!bmap_alloc) {
    dout(1) << __func__
            << std::hex
            << " constructing fallback allocator"
            << dendl;
    // raw new matches the raw bmap_alloc member; freed in shutdown()
    bmap_alloc = new BitmapAllocator(cct,
                                     get_capacity(),
                                     get_block_size(),
                                     get_name());
  }
  bmap_alloc->init_add_free(start, size);
}
211 | ||
// Insert a free range into the AVL tree, first pulling in any free space the
// bitmap allocator holds immediately adjacent on either side, so that
// contiguous free space ends up as a single AVL extent.
void HybridAllocator::_add_to_tree(uint64_t start, uint64_t size)
{
  if (bmap_alloc) {
    // claim_free_to_left/right return the number of adjacent free bytes the
    // bitmap allocator surrendered; grow the range to cover them
    uint64_t head = bmap_alloc->claim_free_to_left(start);
    uint64_t tail = bmap_alloc->claim_free_to_right(start + size);
    ceph_assert(head <= start);
    start -= head;
    size += head + tail;
  }
  AvlAllocator::_add_to_tree(start, size);
}