// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:nil -*-
// vim: ts=8 sw=2 smarttab
7 #include "include/cpp-btree/btree_map.h"
8 #include "include/cpp-btree/btree_set.h"
10 #include "os/bluestore/bluestore_types.h"
11 #include "include/mempool.h"
13 class BtreeAllocator
: public Allocator
{
/// A contiguous run of free space: [start, end).
struct range_seg_t {
  uint64_t start; ///< starting offset of this segment
  uint64_t end;   ///< ending offset (non-inclusive)

  range_seg_t(uint64_t start, uint64_t end)
    : start{start},
      end{end}
  {}

  /// number of bytes covered by this segment
  inline uint64_t length() const {
    return end - start;
  }
};

/// The same segment re-keyed for the size-ordered tree: (size, start).
struct range_value_t {
  uint64_t size;  ///< segment length in bytes
  uint64_t start; ///< starting offset of the segment

  range_value_t(uint64_t start, uint64_t end)
    : size{end - start},
      start{start}
  {}

  range_value_t(const range_seg_t& rs)
    : range_value_t{rs.start, rs.end}
  {}
};

/// Three-way comparator: orders by segment size first, then by start
/// offset, so iterating the size tree yields the smallest (then
/// lowest-addressed) suitable segment first.
struct compare_range_value_t {
  int operator()(const range_value_t& lhs,
                 const range_value_t& rhs) const noexcept {
    if (lhs.size < rhs.size) {
      return -1;
    } else if (lhs.size > rhs.size) {
      return 1;
    }
    if (lhs.start < rhs.start) {
      return -1;
    } else if (lhs.start > rhs.start) {
      return 1;
    } else {
      return 0;
    }
  }
};
59 * ctor intended for the usage from descendant class(es) which
60 * provides handling for spilled over entries
61 * (when entry count >= max_entries)
63 BtreeAllocator(CephContext
* cct
, int64_t device_size
, int64_t block_size
,
65 std::string_view name
);
68 BtreeAllocator(CephContext
* cct
, int64_t device_size
, int64_t block_size
,
69 std::string_view name
);
71 const char* get_type() const override
78 uint64_t max_alloc_size
,
80 PExtentVector
*extents
) override
;
81 void release(const interval_set
<uint64_t>& release_set
) override
;
82 uint64_t get_free() override
;
83 double get_fragmentation() override
;
86 void dump(std::function
<void(uint64_t offset
, uint64_t length
)> notify
) override
;
87 void init_add_free(uint64_t offset
, uint64_t length
) override
;
88 void init_rm_free(uint64_t offset
, uint64_t length
) override
;
89 void shutdown() override
;
// pick a range by search from cursor forward
// NOTE(review): the parameter lists of both pick helpers were lost in
// extraction; reconstructed as the conventional (cursor,) size, align
// arguments — confirm against the .cc definitions.
uint64_t _pick_block_after(
  uint64_t *cursor,
  uint64_t size,
  uint64_t align);

// pick a range with exactly the same size or larger
uint64_t _pick_block_fits(
  uint64_t size,
  uint64_t align);
108 using pool_allocator
= mempool::bluestore_alloc::pool_allocator
<T
>;
111 uint64_t /* start */,
114 pool_allocator
<std::pair
<uint64_t, uint64_t>>>;
115 range_tree_t range_tree
; ///< main range tree
117 * The range_size_tree should always contain the
118 * same number of segments as the range_tree.
119 * The only difference is that the range_size_tree
120 * is ordered by segment sizes.
122 using range_size_tree_t
=
124 range_value_t
/* size, start */,
125 compare_range_value_t
,
126 pool_allocator
<range_value_t
>>;
127 range_size_tree_t range_size_tree
;
uint64_t num_free = 0; ///< total bytes in freelist

/*
 * This value defines the number of elements in the ms_lbas array.
 * The value of 64 was chosen as it covers all power of 2 buckets
 * up to UINT64_MAX.
 * This is the equivalent of highest-bit of UINT64_MAX.
 */
static constexpr unsigned MAX_LBAS = 64;
// presumably one cursor per power-of-2 size bucket (per the comment
// above) — confirm usage in the .cc
uint64_t lbas[MAX_LBAS] = {0};
/*
 * Minimum size which forces the dynamic allocator to change
 * its allocation strategy. Once the allocator cannot satisfy
 * an allocation of this size then it switches to using more
 * aggressive strategy (i.e search by size rather than offset).
 */
uint64_t range_size_alloc_threshold = 0;

/*
 * The minimum free space, in percent, which must be available
 * in allocator to continue allocations in a first-fit fashion.
 * Once the allocator's free space drops below this level we dynamically
 * switch to using best-fit allocations.
 */
int range_size_alloc_free_pct = 0;

/*
 * Max amount of range entries allowed. 0 - unlimited
 */
int64_t range_count_cap = 0;
164 double _get_fragmentation() const {
165 auto free_blocks
= p2align(num_free
, (uint64_t)block_size
) / block_size
;
166 if (free_blocks
<= 1) {
169 return (static_cast<double>(range_tree
.size() - 1) / (free_blocks
- 1));
173 uint64_t _lowest_size_available() const {
174 auto rs
= range_size_tree
.begin();
175 return rs
!= range_size_tree
.end() ? rs
->size
: 0;
181 uint64_t max_alloc_size
,
183 PExtentVector
*extents
);
185 void _release(const interval_set
<uint64_t>& release_set
);
186 void _release(const PExtentVector
& release_set
);
189 // called when extent to be released/marked free
190 void _add_to_tree(uint64_t start
, uint64_t size
);
191 void _process_range_removal(uint64_t start
, uint64_t end
, range_tree_t::iterator
& rs
);
192 void _remove_from_tree(uint64_t start
, uint64_t size
);
193 void _try_remove_from_tree(uint64_t start
, uint64_t size
,
194 std::function
<void(uint64_t offset
, uint64_t length
, bool found
)> cb
);
196 uint64_t _get_free() const {