// buffer::raw data members (patch fragment; the class header is outside this view).
char *data;
unsigned len;
std::atomic<unsigned> nref { 0 };
// Patch: drop the in-class default for the mempool index; every constructor
// now sets it explicitly so the pool choice is threaded through construction.
- int mempool = mempool::mempool_buffer_anon;
+ int mempool;
// Spinlock guarding crc_map; mutable so const readers can take it.
mutable std::atomic_flag crc_spinlock = ATOMIC_FLAG_INIT;
// Cached CRC results; presumably keyed by (offset,length) ranges --
// TODO(review): confirm key/value semantics against the crc code.
map<pair<size_t, size_t>, pair<uint32_t, uint32_t> > crc_map;
// Patch: constructor gains an optional mempool parameter; the default keeps
// the old anonymous-pool behavior, so existing callers are unaffected.
- explicit raw(unsigned l)
- : data(NULL), len(l), nref(0) {
+ explicit raw(unsigned l, int mempool=mempool::mempool_buffer_anon)
+ : data(NULL), len(l), nref(0), mempool(mempool) {
// Account this buffer (1 object, len bytes) against the chosen pool.
mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(1, len);
}
// Patch: same mempool parameterization for the adopt-existing-memory ctor.
- raw(char *c, unsigned l)
- : data(c), len(l), nref(0) {
+ raw(char *c, unsigned l, int mempool=mempool::mempool_buffer_anon)
+ : data(c), len(l), nref(0), mempool(mempool) {
// Account this buffer (1 object, len bytes) against the chosen pool.
mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(1, len);
}
virtual ~raw() {
class buffer::raw_combined : public buffer::raw {
size_t alignment;
public:
// Patch: the ctor now takes align and mempool explicitly (no defaults);
// the defaults move to the static create() factory, and the pool is
// forwarded to the raw base so accounting happens at construction.
- raw_combined(char *dataptr, unsigned l, unsigned align=0)
- : raw(dataptr, l),
+ raw_combined(char *dataptr, unsigned l, unsigned align,
+ int mempool)
+ : raw(dataptr, l, mempool),
alignment(align) {
inc_total_alloc(len);
inc_history_alloc(len);
// NOTE(review): this return belongs to a different member function whose
// header is elided from this fragment (looks like a clone-style method).
return create(len, alignment);
}
// Patch: create() grows a mempool parameter (defaulted to the anonymous
// pool) and forwards it into the placement-new'd raw_combined below.
- static raw_combined *create(unsigned len, unsigned align=0) {
+ static raw_combined *create(unsigned len,
+ unsigned align,
+ int mempool = mempool::mempool_buffer_anon) {
// align == 0 means "use the default word alignment".
if (!align)
align = sizeof(size_t);
// (fragment) the allocation-size computation is cut mid-statement here.
size_t rawlen = ROUND_UP_TO(sizeof(buffer::raw_combined),
// actual data first, since it has presumably larger alignment restriction
// then put the raw_combined at the end
// Placement-new the header at the tail of the single allocation.
- return new (ptr + datalen) raw_combined(ptr, len, align);
+ return new (ptr + datalen) raw_combined(ptr, len, align, mempool);
}
static void operator delete(void *ptr) {
// Default factory: word-aligned allocation in the anonymous pool.
buffer::raw* buffer::create(unsigned len) {
return buffer::create_aligned(len, sizeof(size_t));
}
+ // Patch: new convenience factory that allocates directly in a given pool.
+ buffer::raw* buffer::create_in_mempool(unsigned len, int mempool) {
+ return buffer::create_aligned_in_mempool(len, sizeof(size_t), mempool);
+ }
// Wrap caller-provided memory in a raw; ownership semantics are those of
// raw_claimed_char, which is defined outside this fragment.
buffer::raw* buffer::claim_char(unsigned len, char *buf) {
return new raw_claimed_char(len, buf);
}
return new raw_claim_buffer(buf, len, std::move(del));
}
// Patch: the pool-aware worker becomes create_aligned_in_mempool(); the old
// create_aligned() name is kept below as a backward-compatible wrapper.
- buffer::raw* buffer::create_aligned(unsigned len, unsigned align) {
+ buffer::raw* buffer::create_aligned_in_mempool(
+ unsigned len, unsigned align, int mempool) {
// If alignment is a page multiple, use a separate buffer::raw to
// avoid fragmenting the heap.
//
// (fragment) the page-aligned/posix_memalign branch is elided here.
return new raw_hack_aligned(len, align);
#endif
}
// Combined allocation: the pool is carried through to the raw header.
- return raw_combined::create(len, align);
+ return raw_combined::create(len, align, mempool);
+ }
+ // Old entry point: unchanged behavior, anonymous pool.
+ buffer::raw* buffer::create_aligned(
+ unsigned len, unsigned align) {
+ return create_aligned_in_mempool(len, align,
+ mempool::mempool_buffer_anon);
}
buffer::raw* buffer::create_page_aligned(unsigned len) {
// True iff this ptr's window ends exactly at the end of its raw buffer.
bool buffer::ptr::at_buffer_tail() const { return _off + _len == _raw->len; }
+ // Patch: report the underlying raw buffer's mempool; a null ptr counts as
+ // the anonymous pool.
+ int buffer::ptr::get_mempool() const {
+ if (_raw) {
+ return _raw->mempool;
+ }
+ return mempool::mempool_buffer_anon;
+ }
+
+ // Unconditionally move the underlying raw's accounting to `pool`
+ // (delegates to raw::reassign_to_mempool, defined outside this fragment).
+ void buffer::ptr::reassign_to_mempool(int pool) {
+ if (_raw) {
+ _raw->reassign_to_mempool(pool);
+ }
+ }
+ // Best-effort variant; raw::try_assign_to_mempool presumably only moves
+ // buffers still in the anonymous pool -- TODO(review): confirm.
+ void buffer::ptr::try_assign_to_mempool(int pool) {
+ if (_raw) {
+ _raw->try_assign_to_mempool(pool);
+ }
+ }
+
const char *buffer::ptr::c_str() const {
assert(_raw);
if (buffer_track_c_str)
{
// (fragment) interior of buffer::list::swap -- members exchanged pairwise.
std::swap(_len, other._len);
std::swap(_memcopy_count, other._memcopy_count);
// Patch: the cached _mempool member is removed; the pool now lives only on
// each raw buffer and is derived on demand via get_mempool().
- std::swap(_mempool, other._mempool);
_buffers.swap(other._buffers);
append_buffer.swap(other.append_buffer);
//last_p.swap(other.last_p);
// (fragment) the lines below belong to a different member function.
return is_aligned(CEPH_PAGE_SIZE);
}
+ // Patch: the list's pool is derived from its buffers instead of being
+ // stored: report the pool of the most recently appended buffer, or the
+ // anonymous pool for an empty list.
+ int buffer::list::get_mempool() const
+ {
+ if (_buffers.empty()) {
+ return mempool::mempool_buffer_anon;
+ }
+ return _buffers.back().get_mempool();
+ }
+
// Retarget every underlying raw buffer to `pool`.
void buffer::list::reassign_to_mempool(int pool)
{
// Patch: no cached _mempool member to update anymore; retarget the raw
// buffers directly (append_buffer here; rest of the body is elided).
- _mempool = pool;
if (append_buffer.get_raw()) {
append_buffer.get_raw()->reassign_to_mempool(pool);
}
// Best-effort pool assignment across the list's raw buffers.
void buffer::list::try_assign_to_mempool(int pool)
{
// Patch: drop the cached _mempool member; delegate to the raw buffers
// (append_buffer here; rest of the body is elided from this fragment).
- _mempool = pool;
if (append_buffer.get_raw()) {
append_buffer.get_raw()->try_assign_to_mempool(pool);
}
// Pre-size the append buffer so `prealloc` bytes can be appended without
// further allocation.
void buffer::list::reserve(size_t prealloc)
{
if (append_buffer.unused_tail_length() < prealloc) {
// Patch: allocate straight into the list's current pool instead of
// allocating anonymously and conditionally reassigning afterwards.
- append_buffer = buffer::create(prealloc);
- if (_mempool >= 0) {
- append_buffer.get_raw()->reassign_to_mempool(_mempool);
- }
+ append_buffer = buffer::create_in_mempool(prealloc, get_mempool());
append_buffer.set_length(0); // unused, so far.
}
}
// (fragment) interior of buffer::list::append(char); signature elided above.
unsigned gap = append_buffer.unused_tail_length();
if (!gap) {
// make a new append_buffer!
// Patch: create the buffer directly in the list's pool; align=0 lets
// raw_combined::create fall back to its default word alignment.
- append_buffer = raw_combined::create(CEPH_BUFFER_APPEND_SIZE);
+ append_buffer = raw_combined::create(CEPH_BUFFER_APPEND_SIZE, 0,
+ get_mempool());
append_buffer.set_length(0); // unused, so far.
- if (_mempool >= 0) {
- append_buffer.get_raw()->reassign_to_mempool(_mempool);
- }
}
append(append_buffer, append_buffer.append(c) - 1, 1); // add segment to the list
}
// (fragment) append-buffer sizing from another append path: round the
// request up to the allocator unit, net of the raw_combined header that
// shares the same allocation.
size_t need = ROUND_UP_TO(len, sizeof(size_t)) + sizeof(raw_combined);
size_t alen = ROUND_UP_TO(need, CEPH_BUFFER_ALLOC_UNIT) -
sizeof(raw_combined);
// Patch: allocate in the list's pool up front; the post-hoc reassign (and
// the _mempool member it read) goes away.
- append_buffer = raw_combined::create(alen);
+ append_buffer = raw_combined::create(alen, 0, get_mempool());
append_buffer.set_length(0); // unused, so far.
- if (_mempool >= 0) {
- append_buffer.get_raw()->reassign_to_mempool(_mempool);
- }
}
}