// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
// vim: ts=8 sw=2 smarttab

#include "HybridAllocator.h"

#include <bit>
#include <limits>

#include "common/config_proxy.h"
#include "common/debug.h"

#define dout_context cct
#define dout_subsys ceph_subsys_bluestore
#undef dout_prefix
#define dout_prefix *_dout << "HybridAllocator "

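// Strategy: requests smaller than the shortest range tracked by the AVL
// allocator are tried against the bitmap allocator first, so long
// contiguous AVL extents are not split needlessly; everything else goes
// to the AVL allocator first, with the bitmap allocator as a fallback
// for whatever remains unsatisfied.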
int64_t HybridAllocator::allocate(
  uint64_t want,
  uint64_t unit,
  uint64_t max_alloc_size,
  int64_t hint,
  PExtentVector* extents)
{
  ldout(cct, 10) << __func__ << std::hex
                 << " want 0x" << want
                 << " unit 0x" << unit
                 << " max_alloc_size 0x" << max_alloc_size
                 << " hint 0x" << hint
                 << std::dec << dendl;
  ceph_assert(std::has_single_bit(unit));
  ceph_assert(want % unit == 0);

  if (max_alloc_size == 0) {
    max_alloc_size = want;
  }
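  // cap max_alloc_size so that any single extent we return fits into
  // the bluestore_pextent_t::length field, aligned down to the block size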
  if (constexpr auto cap = std::numeric_limits<decltype(bluestore_pextent_t::length)>::max();
      max_alloc_size >= cap) {
    max_alloc_size = p2align(uint64_t(cap), (uint64_t)get_block_size());
  }

  std::lock_guard l(lock);

  int64_t res;
  PExtentVector local_extents;

  // preserve original 'extents' vector state
  auto orig_size = extents->size();
  auto orig_pos = extents->end();
  if (orig_size) {
    --orig_pos;
  }
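  // orig_pos marks the last pre-existing element (if any): on a failure
  // below, everything appended after it is spliced into local_extents and
  // released, restoring the caller's vector to its original state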

  // try the bitmap first to avoid an unneeded split of a contiguous
  // extent when the desired amount is less than the shortest range
  // tracked by the AVL tree
  if (bmap_alloc && bmap_alloc->get_free() &&
      want < _lowest_size_available()) {
    res = bmap_alloc->allocate(want, unit, max_alloc_size, hint, extents);
    if (res < 0) {
      // got a failure, release whatever was already allocated and
      // start the allocation over from the AVL tree
      if (orig_size) {
        local_extents.insert(
          local_extents.end(), ++orig_pos, extents->end());
        extents->resize(orig_size);
      } else {
        extents->swap(local_extents);
      }
      bmap_alloc->release(local_extents);
      res = 0;
    }
    if ((uint64_t)res < want) {
      auto res2 = _allocate(want - res, unit, max_alloc_size, hint, extents);
      if (res2 < 0) {
        res = res2; // caller to do the release
      } else {
        res += res2;
      }
    }
  } else {
    res = _allocate(want, unit, max_alloc_size, hint, extents);
    if (res < 0) {
      // got a failure, release whatever was already allocated and
      // start the allocation over from the bitmap
      if (orig_size) {
        local_extents.insert(
          local_extents.end(), ++orig_pos, extents->end());
        extents->resize(orig_size);
      } else {
        extents->swap(local_extents);
      }
      _release(local_extents);
      res = 0;
    }
    if ((uint64_t)res < want) {
      auto res2 = bmap_alloc ?
        bmap_alloc->allocate(want - res, unit, max_alloc_size, hint, extents) :
        0;
      if (res2 < 0) {
        res = res2; // caller to do the release
      } else {
        res += res2;
      }
    }
  }
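  // res == 0 means neither allocator produced anything, hence ENOSPC;
  // a negative res is an inner failure passed through for the caller
  // to clean up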
  return res ? res : -ENOSPC;
}

void HybridAllocator::release(const interval_set<uint64_t>& release_set) {
  std::lock_guard l(lock);
  // this will attempt to put free ranges into the AvlAllocator first and
  // fall back to the bitmap one via the _try_insert_range call
  _release(release_set);
}

uint64_t HybridAllocator::get_free()
{
  std::lock_guard l(lock);
  return (bmap_alloc ? bmap_alloc->get_free() : 0) + _get_free();
}

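// the overall fragmentation score is the free-space-weighted average of
// the two allocators' scores:
//   f = f_avl * avl_free / total_free + f_bmap * bmap_free / total_free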
double HybridAllocator::get_fragmentation()
{
  std::lock_guard l(lock);
  auto f = AvlAllocator::_get_fragmentation();
  auto bmap_free = bmap_alloc ? bmap_alloc->get_free() : 0;
  if (bmap_free) {
    auto _free = _get_free() + bmap_free;
    auto bf = bmap_alloc->get_fragmentation();

    f = f * _get_free() / _free + bf * bmap_free / _free;
  }
  return f;
}

void HybridAllocator::dump()
{
  std::lock_guard l(lock);
  AvlAllocator::_dump();
  if (bmap_alloc) {
    bmap_alloc->dump();
  }
  ldout(cct, 0) << __func__
                << " avl_free: " << _get_free()
                << " bmap_free: " << (bmap_alloc ? bmap_alloc->get_free() : 0)
                << dendl;
}

void HybridAllocator::foreach(
  std::function<void(uint64_t offset, uint64_t length)> notify)
{
  std::lock_guard l(lock);
  AvlAllocator::_foreach(notify);
  if (bmap_alloc) {
    bmap_alloc->foreach(notify);
  }
}

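// at init time a range being marked as allocated may be tracked by either
// allocator: whatever the AVL tree does not know about is forwarded to
// the bitmap allocator, and a range known to neither is a bug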
void HybridAllocator::init_rm_free(uint64_t offset, uint64_t length)
{
  if (!length)
    return;
  std::lock_guard l(lock);
  ldout(cct, 10) << __func__ << std::hex
                 << " offset 0x" << offset
                 << " length 0x" << length
                 << std::dec << dendl;
  _try_remove_from_tree(offset, length,
    [&](uint64_t o, uint64_t l, bool found) {
      if (!found) {
        if (bmap_alloc) {
          bmap_alloc->init_rm_free(o, l);
        } else {
          lderr(cct) << "init_rm_free lambda " << std::hex
                     << "Unexpected extent: "
                     << " 0x" << o << "~" << l
                     << std::dec << dendl;
          ceph_assert(false);
        }
      }
    });
}

void HybridAllocator::shutdown()
{
  std::lock_guard l(lock);
  _shutdown();
  if (bmap_alloc) {
    bmap_alloc->shutdown();
    delete bmap_alloc;
    bmap_alloc = nullptr;
  }
}

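// invoked when a free range cannot be kept in the AVL tree (see the
// _try_insert_range fallback mentioned in release() above); the range
// spills over into the bitmap allocator, which is constructed lazily
// on first use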
void HybridAllocator::_spillover_range(uint64_t start, uint64_t end)
{
  auto size = end - start;
  dout(20) << __func__
           << std::hex << " "
           << start << "~" << size
           << std::dec
           << dendl;
  ceph_assert(size);
  if (!bmap_alloc) {
    dout(1) << __func__
            << std::hex
            << " constructing fallback allocator"
            << dendl;
    bmap_alloc = new BitmapAllocator(cct,
                                     get_capacity(),
                                     get_block_size(),
                                     get_name() + ".fallback");
  }
  bmap_alloc->init_add_free(start, size);
}

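// before inserting a freed range into the AVL tree, claim any free space
// the bitmap allocator holds immediately to its left and right so the
// coalesced, larger extent lands in the tree as a single range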
void HybridAllocator::_add_to_tree(uint64_t start, uint64_t size)
{
  if (bmap_alloc) {
    uint64_t head = bmap_alloc->claim_free_to_left(start);
    uint64_t tail = bmap_alloc->claim_free_to_right(start + size);
    ceph_assert(head <= start);
    start -= head;
    size += head + tail;
  }
  AvlAllocator::_add_to_tree(start, size);
}