// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
// vim: ts=8 sw=2 smarttab
/*
 * Ceph - scalable distributed file system
 *
 * Copyright (C) 2014 Red Hat
 *
 * This is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License version 2.1, as published by the Free Software
 * Foundation.  See file COPYING.
 *
 */
#include "bluestore_types.h"
#include "common/Formatter.h"
#include "common/Checksummer.h"
#include "include/stringify.h"
20 void ExtentList::add_extents(int64_t start
, int64_t count
) {
21 AllocExtent
*last_extent
= NULL
;
22 bool can_merge
= false;
24 if (!m_extents
->empty()) {
25 last_extent
= &(m_extents
->back());
26 uint64_t last_offset
= last_extent
->end() / m_block_size
;
27 uint32_t last_length
= last_extent
->length
/ m_block_size
;
28 if ((last_offset
== (uint64_t) start
) &&
29 (!m_max_blocks
|| (last_length
+ count
) <= m_max_blocks
)) {
35 last_extent
->length
+= (count
* m_block_size
);
37 m_extents
->emplace_back(AllocExtent(start
* m_block_size
,
38 count
* m_block_size
));
42 // bluestore_bdev_label_t
44 void bluestore_bdev_label_t::encode(bufferlist
& bl
) const
46 // be slightly friendly to someone who looks at the device
47 bl
.append("bluestore block device\n");
48 bl
.append(stringify(osd_uuid
));
50 ENCODE_START(1, 1, bl
);
51 ::encode(osd_uuid
, bl
);
54 ::encode(description
, bl
);
58 void bluestore_bdev_label_t::decode(bufferlist::iterator
& p
)
60 p
.advance(60); // see above
62 ::decode(osd_uuid
, p
);
65 ::decode(description
, p
);
69 void bluestore_bdev_label_t::dump(Formatter
*f
) const
71 f
->dump_stream("osd_uuid") << osd_uuid
;
72 f
->dump_unsigned("size", size
);
73 f
->dump_stream("btime") << btime
;
74 f
->dump_string("description", description
);
77 void bluestore_bdev_label_t::generate_test_instances(
78 list
<bluestore_bdev_label_t
*>& o
)
80 o
.push_back(new bluestore_bdev_label_t
);
81 o
.push_back(new bluestore_bdev_label_t
);
83 o
.back()->btime
= utime_t(4, 5);
84 o
.back()->description
= "fakey";
87 ostream
& operator<<(ostream
& out
, const bluestore_bdev_label_t
& l
)
89 return out
<< "bdev(osd_uuid " << l
.osd_uuid
90 << " size 0x" << std::hex
<< l
.size
<< std::dec
91 << " btime " << l
.btime
92 << " desc " << l
.description
<< ")";
97 void bluestore_cnode_t::dump(Formatter
*f
) const
99 f
->dump_unsigned("bits", bits
);
102 void bluestore_cnode_t::generate_test_instances(list
<bluestore_cnode_t
*>& o
)
104 o
.push_back(new bluestore_cnode_t());
105 o
.push_back(new bluestore_cnode_t(0));
106 o
.push_back(new bluestore_cnode_t(123));
109 // bluestore_extent_ref_map_t
111 void bluestore_extent_ref_map_t::_check() const
115 for (const auto &p
: ref_map
) {
117 assert(0 == "overlap");
118 if (p
.first
== pos
&& p
.second
.refs
== refs
)
119 assert(0 == "unmerged");
120 pos
= p
.first
+ p
.second
.length
;
121 refs
= p
.second
.refs
;
125 void bluestore_extent_ref_map_t::_maybe_merge_left(
126 map
<uint64_t,record_t
>::iterator
& p
)
128 if (p
== ref_map
.begin())
132 if (q
->second
.refs
== p
->second
.refs
&&
133 q
->first
+ q
->second
.length
== p
->first
) {
134 q
->second
.length
+= p
->second
.length
;
140 void bluestore_extent_ref_map_t::get(uint64_t offset
, uint32_t length
)
142 auto p
= ref_map
.lower_bound(offset
);
143 if (p
!= ref_map
.begin()) {
145 if (p
->first
+ p
->second
.length
<= offset
) {
150 if (p
== ref_map
.end()) {
151 // nothing after offset; add the whole thing.
153 map
<uint64_t,record_t
>::value_type(offset
, record_t(length
, 1))).first
;
156 if (p
->first
> offset
) {
158 uint64_t newlen
= MIN(p
->first
- offset
, length
);
160 map
<uint64_t,record_t
>::value_type(offset
,
161 record_t(newlen
, 1))).first
;
164 _maybe_merge_left(p
);
168 if (p
->first
< offset
) {
169 // split off the portion before offset
170 assert(p
->first
+ p
->second
.length
> offset
);
171 uint64_t left
= p
->first
+ p
->second
.length
- offset
;
172 p
->second
.length
= offset
- p
->first
;
173 p
= ref_map
.insert(map
<uint64_t,record_t
>::value_type(
174 offset
, record_t(left
, p
->second
.refs
))).first
;
177 assert(p
->first
== offset
);
178 if (length
< p
->second
.length
) {
179 ref_map
.insert(make_pair(offset
+ length
,
180 record_t(p
->second
.length
- length
,
182 p
->second
.length
= length
;
187 offset
+= p
->second
.length
;
188 length
-= p
->second
.length
;
189 _maybe_merge_left(p
);
192 if (p
!= ref_map
.end())
193 _maybe_merge_left(p
);
197 void bluestore_extent_ref_map_t::put(
198 uint64_t offset
, uint32_t length
,
199 PExtentVector
*release
)
201 //NB: existing entries in 'release' container must be preserved!
203 auto p
= ref_map
.lower_bound(offset
);
204 if (p
== ref_map
.end() || p
->first
> offset
) {
205 if (p
== ref_map
.begin()) {
206 assert(0 == "put on missing extent (nothing before)");
209 if (p
->first
+ p
->second
.length
<= offset
) {
210 assert(0 == "put on missing extent (gap)");
213 if (p
->first
< offset
) {
214 uint64_t left
= p
->first
+ p
->second
.length
- offset
;
215 p
->second
.length
= offset
- p
->first
;
216 p
= ref_map
.insert(map
<uint64_t,record_t
>::value_type(
217 offset
, record_t(left
, p
->second
.refs
))).first
;
220 assert(p
->first
== offset
);
221 if (length
< p
->second
.length
) {
222 ref_map
.insert(make_pair(offset
+ length
,
223 record_t(p
->second
.length
- length
,
225 if (p
->second
.refs
> 1) {
226 p
->second
.length
= length
;
228 _maybe_merge_left(p
);
231 release
->push_back(bluestore_pextent_t(p
->first
, length
));
236 offset
+= p
->second
.length
;
237 length
-= p
->second
.length
;
238 if (p
->second
.refs
> 1) {
240 _maybe_merge_left(p
);
244 release
->push_back(bluestore_pextent_t(p
->first
, p
->second
.length
));
248 if (p
!= ref_map
.end())
249 _maybe_merge_left(p
);
253 bool bluestore_extent_ref_map_t::contains(uint64_t offset
, uint32_t length
) const
255 auto p
= ref_map
.lower_bound(offset
);
256 if (p
== ref_map
.end() || p
->first
> offset
) {
257 if (p
== ref_map
.begin()) {
258 return false; // nothing before
261 if (p
->first
+ p
->second
.length
<= offset
) {
266 if (p
== ref_map
.end())
268 if (p
->first
> offset
)
270 if (p
->first
+ p
->second
.length
>= offset
+ length
)
272 uint64_t overlap
= p
->first
+ p
->second
.length
- offset
;
280 bool bluestore_extent_ref_map_t::intersects(
282 uint32_t length
) const
284 auto p
= ref_map
.lower_bound(offset
);
285 if (p
!= ref_map
.begin()) {
287 if (p
->first
+ p
->second
.length
<= offset
) {
291 if (p
== ref_map
.end())
293 if (p
->first
>= offset
+ length
)
295 return true; // intersects p!
298 void bluestore_extent_ref_map_t::dump(Formatter
*f
) const
300 f
->open_array_section("ref_map");
301 for (auto& p
: ref_map
) {
302 f
->open_object_section("ref");
303 f
->dump_unsigned("offset", p
.first
);
304 f
->dump_unsigned("length", p
.second
.length
);
305 f
->dump_unsigned("refs", p
.second
.refs
);
311 void bluestore_extent_ref_map_t::generate_test_instances(
312 list
<bluestore_extent_ref_map_t
*>& o
)
314 o
.push_back(new bluestore_extent_ref_map_t
);
315 o
.push_back(new bluestore_extent_ref_map_t
);
316 o
.back()->get(10, 10);
317 o
.back()->get(18, 22);
318 o
.back()->get(20, 20);
319 o
.back()->get(10, 25);
320 o
.back()->get(15, 20);
323 ostream
& operator<<(ostream
& out
, const bluestore_extent_ref_map_t
& m
)
326 for (auto p
= m
.ref_map
.begin(); p
!= m
.ref_map
.end(); ++p
) {
327 if (p
!= m
.ref_map
.begin())
329 out
<< std::hex
<< "0x" << p
->first
<< "~" << p
->second
.length
<< std::dec
330 << "=" << p
->second
.refs
;
336 // bluestore_blob_use_tracker_t
338 void bluestore_blob_use_tracker_t::allocate()
341 bytes_per_au
= new uint32_t[num_au
];
342 for (uint32_t i
= 0; i
< num_au
; ++i
) {
347 void bluestore_blob_use_tracker_t::init(
348 uint32_t full_length
, uint32_t _au_size
) {
349 assert(!au_size
|| is_empty());
350 assert(_au_size
> 0);
351 assert(full_length
> 0);
353 uint32_t _num_au
= ROUND_UP_TO(full_length
, _au_size
) / _au_size
;
361 void bluestore_blob_use_tracker_t::get(
362 uint32_t offset
, uint32_t length
)
366 total_bytes
+= length
;
368 auto end
= offset
+ length
;
370 while (offset
< end
) {
371 auto phase
= offset
% au_size
;
372 bytes_per_au
[offset
/ au_size
] +=
373 MIN(au_size
- phase
, end
- offset
);
374 offset
+= (phase
? au_size
- phase
: au_size
);
379 bool bluestore_blob_use_tracker_t::put(
380 uint32_t offset
, uint32_t length
,
381 PExtentVector
*release_units
)
385 release_units
->clear();
387 bool maybe_empty
= true;
389 assert(total_bytes
>= length
);
390 total_bytes
-= length
;
392 auto end
= offset
+ length
;
393 uint64_t next_offs
= 0;
394 while (offset
< end
) {
395 auto phase
= offset
% au_size
;
396 size_t pos
= offset
/ au_size
;
397 auto diff
= MIN(au_size
- phase
, end
- offset
);
398 assert(diff
<= bytes_per_au
[pos
]);
399 bytes_per_au
[pos
] -= diff
;
400 offset
+= (phase
? au_size
- phase
: au_size
);
401 if (bytes_per_au
[pos
] == 0) {
403 if (release_units
->empty() || next_offs
!= pos
* au_size
) {
404 release_units
->emplace_back(pos
* au_size
, au_size
);
406 release_units
->back().length
+= au_size
;
408 next_offs
+= au_size
;
411 maybe_empty
= false; // micro optimization detecting we aren't empty
412 // even in the affected extent
416 bool empty
= maybe_empty
? !is_not_empty() : false;
417 if (empty
&& release_units
) {
418 release_units
->clear();
423 bool bluestore_blob_use_tracker_t::can_split() const
428 bool bluestore_blob_use_tracker_t::can_split_at(uint32_t blob_offset
) const
431 return (blob_offset
% au_size
) == 0 &&
432 blob_offset
< num_au
* au_size
;
435 void bluestore_blob_use_tracker_t::split(
436 uint32_t blob_offset
,
437 bluestore_blob_use_tracker_t
* r
)
441 assert(can_split_at(blob_offset
));
442 assert(r
->is_empty());
444 uint32_t new_num_au
= blob_offset
/ au_size
;
445 r
->init( (num_au
- new_num_au
) * au_size
, au_size
);
447 for (auto i
= new_num_au
; i
< num_au
; i
++) {
448 r
->get((i
- new_num_au
) * au_size
, bytes_per_au
[i
]);
451 if (new_num_au
== 0) {
453 } else if (new_num_au
== 1) {
454 uint32_t tmp
= bytes_per_au
[0];
455 uint32_t _au_size
= au_size
;
464 bool bluestore_blob_use_tracker_t::equal(
465 const bluestore_blob_use_tracker_t
& other
) const
467 if (!num_au
&& !other
.num_au
) {
468 return total_bytes
== other
.total_bytes
&& au_size
== other
.au_size
;
469 } else if (num_au
&& other
.num_au
) {
470 if (num_au
!= other
.num_au
|| au_size
!= other
.au_size
) {
473 for (size_t i
= 0; i
< num_au
; i
++) {
474 if (bytes_per_au
[i
] != other
.bytes_per_au
[i
]) {
481 uint32_t n
= num_au
? num_au
: other
.num_au
;
482 uint32_t referenced
=
483 num_au
? other
.get_referenced_bytes() : get_referenced_bytes();
484 auto bytes_per_au_tmp
= num_au
? bytes_per_au
: other
.bytes_per_au
;
485 uint32_t my_referenced
= 0;
486 for (size_t i
= 0; i
< n
; i
++) {
487 my_referenced
+= bytes_per_au_tmp
[i
];
488 if (my_referenced
> referenced
) {
492 return my_referenced
== referenced
;
495 void bluestore_blob_use_tracker_t::dump(Formatter
*f
) const
497 f
->dump_unsigned("num_au", num_au
);
498 f
->dump_unsigned("au_size", au_size
);
500 f
->dump_unsigned("total_bytes", total_bytes
);
502 f
->open_array_section("bytes_per_au");
503 for (size_t i
= 0; i
< num_au
; ++i
) {
504 f
->dump_unsigned("", bytes_per_au
[i
]);
510 void bluestore_blob_use_tracker_t::generate_test_instances(
511 list
<bluestore_blob_use_tracker_t
*>& o
)
513 o
.push_back(new bluestore_blob_use_tracker_t());
514 o
.back()->init(16, 16);
515 o
.back()->get(10, 10);
516 o
.back()->get(10, 5);
517 o
.push_back(new bluestore_blob_use_tracker_t());
518 o
.back()->init(60, 16);
519 o
.back()->get(18, 22);
520 o
.back()->get(20, 20);
521 o
.back()->get(15, 20);
524 ostream
& operator<<(ostream
& out
, const bluestore_blob_use_tracker_t
& m
)
526 out
<< "use_tracker(" << std::hex
;
528 out
<< "0x" << m
.au_size
530 << "0x" << m
.total_bytes
;
532 out
<< "0x" << m
.num_au
533 << "*0x" << m
.au_size
535 for (size_t i
= 0; i
< m
.num_au
; ++i
) {
538 out
<< m
.bytes_per_au
[i
];
542 out
<< std::dec
<< ")";
546 // bluestore_pextent_t
548 void bluestore_pextent_t::dump(Formatter
*f
) const
550 f
->dump_unsigned("offset", offset
);
551 f
->dump_unsigned("length", length
);
554 ostream
& operator<<(ostream
& out
, const bluestore_pextent_t
& o
) {
556 return out
<< "0x" << std::hex
<< o
.offset
<< "~" << o
.length
<< std::dec
;
558 return out
<< "!~" << std::hex
<< o
.length
<< std::dec
;
561 void bluestore_pextent_t::generate_test_instances(list
<bluestore_pextent_t
*>& ls
)
563 ls
.push_back(new bluestore_pextent_t
);
564 ls
.push_back(new bluestore_pextent_t(1, 2));
569 string
bluestore_blob_t::get_flags_string(unsigned flags
)
572 if (flags
& FLAG_MUTABLE
) {
575 if (flags
& FLAG_COMPRESSED
) {
580 if (flags
& FLAG_CSUM
) {
585 if (flags
& FLAG_HAS_UNUSED
) {
590 if (flags
& FLAG_SHARED
) {
599 size_t bluestore_blob_t::get_csum_value_size() const
601 return Checksummer::get_csum_value_size(csum_type
);
604 void bluestore_blob_t::dump(Formatter
*f
) const
606 f
->open_array_section("extents");
607 for (auto& p
: extents
) {
608 f
->dump_object("extent", p
);
611 f
->dump_unsigned("logical_length", logical_length
);
612 f
->dump_unsigned("compressed_length", compressed_length
);
613 f
->dump_unsigned("flags", flags
);
614 f
->dump_unsigned("csum_type", csum_type
);
615 f
->dump_unsigned("csum_chunk_order", csum_chunk_order
);
616 f
->open_array_section("csum_data");
617 size_t n
= get_csum_count();
618 for (unsigned i
= 0; i
< n
; ++i
)
619 f
->dump_unsigned("csum", get_csum_item(i
));
621 f
->dump_unsigned("unused", unused
);
624 void bluestore_blob_t::generate_test_instances(list
<bluestore_blob_t
*>& ls
)
626 ls
.push_back(new bluestore_blob_t
);
627 ls
.push_back(new bluestore_blob_t(0));
628 ls
.push_back(new bluestore_blob_t
);
629 ls
.back()->allocated_test(bluestore_pextent_t(111, 222));
630 ls
.push_back(new bluestore_blob_t
);
631 ls
.back()->init_csum(Checksummer::CSUM_XXHASH32
, 16, 65536);
632 ls
.back()->csum_data
= buffer::claim_malloc(4, strdup("abcd"));
633 ls
.back()->add_unused(0, 3);
634 ls
.back()->add_unused(8, 8);
635 ls
.back()->allocated_test(bluestore_pextent_t(0x40100000, 0x10000));
636 ls
.back()->allocated_test(
637 bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET
, 0x1000));
638 ls
.back()->allocated_test(bluestore_pextent_t(0x40120000, 0x10000));
641 ostream
& operator<<(ostream
& out
, const bluestore_blob_t
& o
)
643 out
<< "blob(" << o
.get_extents();
644 if (o
.is_compressed()) {
645 out
<< " clen 0x" << std::hex
646 << o
.get_logical_length()
648 << o
.get_compressed_payload_length()
652 out
<< " " << o
.get_flags_string();
655 out
<< " " << Checksummer::get_csum_type_string(o
.csum_type
)
656 << "/0x" << std::hex
<< (1ull << o
.csum_chunk_order
) << std::dec
;
659 out
<< " unused=0x" << std::hex
<< o
.unused
<< std::dec
;
664 void bluestore_blob_t::calc_csum(uint64_t b_off
, const bufferlist
& bl
)
667 case Checksummer::CSUM_XXHASH32
:
668 Checksummer::calculate
<Checksummer::xxhash32
>(
669 get_csum_chunk_size(), b_off
, bl
.length(), bl
, &csum_data
);
671 case Checksummer::CSUM_XXHASH64
:
672 Checksummer::calculate
<Checksummer::xxhash64
>(
673 get_csum_chunk_size(), b_off
, bl
.length(), bl
, &csum_data
);
675 case Checksummer::CSUM_CRC32C
:
676 Checksummer::calculate
<Checksummer::crc32c
>(
677 get_csum_chunk_size(), b_off
, bl
.length(), bl
, &csum_data
);
679 case Checksummer::CSUM_CRC32C_16
:
680 Checksummer::calculate
<Checksummer::crc32c_16
>(
681 get_csum_chunk_size(), b_off
, bl
.length(), bl
, &csum_data
);
683 case Checksummer::CSUM_CRC32C_8
:
684 Checksummer::calculate
<Checksummer::crc32c_8
>(
685 get_csum_chunk_size(), b_off
, bl
.length(), bl
, &csum_data
);
690 int bluestore_blob_t::verify_csum(uint64_t b_off
, const bufferlist
& bl
,
691 int* b_bad_off
, uint64_t *bad_csum
) const
697 case Checksummer::CSUM_NONE
:
699 case Checksummer::CSUM_XXHASH32
:
700 *b_bad_off
= Checksummer::verify
<Checksummer::xxhash32
>(
701 get_csum_chunk_size(), b_off
, bl
.length(), bl
, csum_data
, bad_csum
);
703 case Checksummer::CSUM_XXHASH64
:
704 *b_bad_off
= Checksummer::verify
<Checksummer::xxhash64
>(
705 get_csum_chunk_size(), b_off
, bl
.length(), bl
, csum_data
, bad_csum
);
707 case Checksummer::CSUM_CRC32C
:
708 *b_bad_off
= Checksummer::verify
<Checksummer::crc32c
>(
709 get_csum_chunk_size(), b_off
, bl
.length(), bl
, csum_data
, bad_csum
);
711 case Checksummer::CSUM_CRC32C_16
:
712 *b_bad_off
= Checksummer::verify
<Checksummer::crc32c_16
>(
713 get_csum_chunk_size(), b_off
, bl
.length(), bl
, csum_data
, bad_csum
);
715 case Checksummer::CSUM_CRC32C_8
:
716 *b_bad_off
= Checksummer::verify
<Checksummer::crc32c_8
>(
717 get_csum_chunk_size(), b_off
, bl
.length(), bl
, csum_data
, bad_csum
);
726 else if (*b_bad_off
>= 0)
727 return -1; // bad checksum
732 void bluestore_blob_t::allocated(uint32_t b_off
, uint32_t length
, const AllocExtentVector
& allocs
)
734 if (extents
.size() == 0) {
735 // if blob is compressed then logical length to be already configured
736 // otherwise - to be unset.
737 assert((is_compressed() && logical_length
!= 0) ||
738 (!is_compressed() && logical_length
== 0));
740 extents
.reserve(allocs
.size() + (b_off
? 1 : 0));
742 extents
.emplace_back(
743 bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET
, b_off
));
745 uint32_t new_len
= b_off
;
746 for (auto& a
: allocs
) {
747 extents
.emplace_back(a
.offset
, a
.length
);
750 if (!is_compressed()) {
751 logical_length
= new_len
;
754 assert(!is_compressed()); // partial allocations are forbidden when
756 assert(b_off
< logical_length
);
757 uint32_t cur_offs
= 0;
758 auto start_it
= extents
.begin();
761 if (cur_offs
+ start_it
->length
> b_off
) {
764 cur_offs
+= start_it
->length
;
768 uint32_t head
= b_off
- cur_offs
;
769 uint32_t end_off
= b_off
+ length
;
770 auto end_it
= start_it
;
773 assert(!end_it
->is_valid());
774 if (cur_offs
+ end_it
->length
>= end_off
) {
777 cur_offs
+= end_it
->length
;
780 assert(cur_offs
+ end_it
->length
>= end_off
);
781 uint32_t tail
= cur_offs
+ end_it
->length
- end_off
;
783 start_it
= extents
.erase(start_it
, end_it
+ 1);
784 size_t count
= allocs
.size();
785 count
+= head
? 1 : 0;
786 count
+= tail
? 1 : 0;
787 extents
.insert(start_it
,
790 bluestore_pextent_t::INVALID_OFFSET
, 0));
792 // Workaround to resolve lack of proper iterator return in vector::insert
793 // Looks like some gcc/stl implementations still lack it despite c++11
795 start_it
= extents
.begin() + pos
;
798 start_it
->length
= head
;
801 for(auto& e
: allocs
) {
806 start_it
->length
= tail
;
811 // cut it out of extents
814 uint64_t invalid
= 0;
816 void add_invalid(uint64_t length
) {
821 v
.emplace_back(bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET
,
826 void add(uint64_t offset
, uint64_t length
) {
827 if (offset
== bluestore_pextent_t::INVALID_OFFSET
) {
832 v
.emplace_back(bluestore_pextent_t(offset
, length
));
837 void bluestore_blob_t::allocated_test(const bluestore_pextent_t
& alloc
)
839 extents
.emplace_back(alloc
);
840 if (!is_compressed()) {
841 logical_length
+= alloc
.length
;
845 bool bluestore_blob_t::release_extents(bool all
,
846 const PExtentVector
& logical
,
849 // common case: all of it?
852 for (auto& e
: extents
) {
858 assert(is_compressed() || get_logical_length() == pos
);
860 extents
[0].offset
= bluestore_pextent_t::INVALID_OFFSET
;
861 extents
[0].length
= pos
;
864 // remove from pextents according to logical release list
866 auto loffs_it
= logical
.begin();
867 auto lend
= logical
.end();
868 uint32_t pext_loffs_start
= 0; //starting loffset of the current pextent
869 uint32_t pext_loffs
= 0; //current loffset
870 auto pext_it
= extents
.begin();
871 auto pext_end
= extents
.end();
872 while (pext_it
!= pext_end
) {
873 if (loffs_it
== lend
||
874 pext_loffs_start
+ pext_it
->length
<= loffs_it
->offset
) {
875 int delta0
= pext_loffs
- pext_loffs_start
;
877 if ((uint32_t)delta0
< pext_it
->length
) {
878 vb
.add(pext_it
->offset
+ delta0
, pext_it
->length
- delta0
);
880 pext_loffs_start
+= pext_it
->length
;
881 pext_loffs
= pext_loffs_start
;
885 //assert(pext_loffs == pext_loffs_start);
886 int delta0
= pext_loffs
- pext_loffs_start
;
889 int delta
= loffs_it
->offset
- pext_loffs
;
892 vb
.add(pext_it
->offset
+ delta0
, delta
);
896 PExtentVector::iterator last_r
= r
->end();
897 if (r
->begin() != last_r
) {
900 uint32_t to_release
= loffs_it
->length
;
902 uint32_t to_release_part
=
903 MIN(pext_it
->length
- delta0
- delta
, to_release
);
904 auto o
= pext_it
->offset
+ delta0
+ delta
;
905 if (last_r
!= r
->end() && last_r
->offset
+ last_r
->length
== o
) {
906 last_r
->length
+= to_release_part
;
909 last_r
= r
->emplace(r
->end(), o
, to_release_part
);
911 to_release
-= to_release_part
;
912 pext_loffs
+= to_release_part
;
913 if (pext_loffs
== pext_loffs_start
+ pext_it
->length
) {
914 pext_loffs_start
+= pext_it
->length
;
915 pext_loffs
= pext_loffs_start
;
919 } while (to_release
> 0 && pext_it
!= pext_end
);
920 vb
.add_invalid(loffs_it
->length
- to_release
);
929 void bluestore_blob_t::split(uint32_t blob_offset
, bluestore_blob_t
& rb
)
931 size_t left
= blob_offset
;
932 uint32_t llen_lb
= 0;
933 uint32_t llen_rb
= 0;
935 for (auto p
= extents
.begin(); p
!= extents
.end(); ++p
, ++i
) {
936 if (p
->length
<= left
) {
938 llen_lb
+= p
->length
;
943 rb
.extents
.emplace_back(bluestore_pextent_t(p
->offset
+ left
,
947 rb
.extents
.emplace_back(bluestore_pextent_t(
948 bluestore_pextent_t::INVALID_OFFSET
,
951 llen_rb
+= p
->length
- left
;
957 while (p
!= extents
.end()) {
958 llen_rb
+= p
->length
;
959 rb
.extents
.push_back(*p
++);
962 logical_length
= llen_lb
;
963 rb
.logical_length
= llen_rb
;
969 rb
.csum_type
= csum_type
;
970 rb
.csum_chunk_order
= csum_chunk_order
;
971 size_t csum_order
= get_csum_chunk_size();
972 assert(blob_offset
% csum_order
== 0);
973 size_t pos
= (blob_offset
/ csum_order
) * get_csum_value_size();
974 // deep copy csum data
977 rb
.csum_data
= bufferptr(old
.c_str() + pos
, old
.length() - pos
);
978 csum_data
= bufferptr(old
.c_str(), pos
);
982 // bluestore_shared_blob_t
984 void bluestore_shared_blob_t::dump(Formatter
*f
) const
986 f
->dump_int("sbid", sbid
);
987 f
->dump_object("ref_map", ref_map
);
990 void bluestore_shared_blob_t::generate_test_instances(
991 list
<bluestore_shared_blob_t
*>& ls
)
993 ls
.push_back(new bluestore_shared_blob_t(1));
996 ostream
& operator<<(ostream
& out
, const bluestore_shared_blob_t
& sb
)
998 out
<< " sbid 0x" << std::hex
<< sb
.sbid
<< std::dec
;
999 out
<< " ref_map(" << sb
.ref_map
<< ")";
1003 // bluestore_onode_t
1005 void bluestore_onode_t::shard_info::dump(Formatter
*f
) const
1007 f
->dump_unsigned("offset", offset
);
1008 f
->dump_unsigned("bytes", bytes
);
1011 ostream
& operator<<(ostream
& out
, const bluestore_onode_t::shard_info
& si
)
1013 return out
<< std::hex
<< "0x" << si
.offset
<< "(0x" << si
.bytes
<< " bytes"
1017 void bluestore_onode_t::dump(Formatter
*f
) const
1019 f
->dump_unsigned("nid", nid
);
1020 f
->dump_unsigned("size", size
);
1021 f
->open_object_section("attrs");
1022 for (auto p
= attrs
.begin(); p
!= attrs
.end(); ++p
) {
1023 f
->open_object_section("attr");
1024 f
->dump_string("name", p
->first
.c_str()); // it's not quite std::string
1025 f
->dump_unsigned("len", p
->second
.length());
1029 f
->dump_string("flags", get_flags_string());
1030 f
->open_array_section("extent_map_shards");
1031 for (auto si
: extent_map_shards
) {
1032 f
->dump_object("shard", si
);
1035 f
->dump_unsigned("expected_object_size", expected_object_size
);
1036 f
->dump_unsigned("expected_write_size", expected_write_size
);
1037 f
->dump_unsigned("alloc_hint_flags", alloc_hint_flags
);
1040 void bluestore_onode_t::generate_test_instances(list
<bluestore_onode_t
*>& o
)
1042 o
.push_back(new bluestore_onode_t());
1046 // bluestore_deferred_op_t
1048 void bluestore_deferred_op_t::dump(Formatter
*f
) const
1050 f
->dump_unsigned("op", (int)op
);
1051 f
->dump_unsigned("data_len", data
.length());
1052 f
->open_array_section("extents");
1053 for (auto& e
: extents
) {
1054 f
->dump_object("extent", e
);
1059 void bluestore_deferred_op_t::generate_test_instances(list
<bluestore_deferred_op_t
*>& o
)
1061 o
.push_back(new bluestore_deferred_op_t
);
1062 o
.push_back(new bluestore_deferred_op_t
);
1063 o
.back()->op
= OP_WRITE
;
1064 o
.back()->extents
.push_back(bluestore_pextent_t(1, 2));
1065 o
.back()->extents
.push_back(bluestore_pextent_t(100, 5));
1066 o
.back()->data
.append("my data");
1069 void bluestore_deferred_transaction_t::dump(Formatter
*f
) const
1071 f
->dump_unsigned("seq", seq
);
1072 f
->open_array_section("ops");
1073 for (list
<bluestore_deferred_op_t
>::const_iterator p
= ops
.begin(); p
!= ops
.end(); ++p
) {
1074 f
->dump_object("op", *p
);
1078 f
->open_array_section("released extents");
1079 for (interval_set
<uint64_t>::const_iterator p
= released
.begin(); p
!= released
.end(); ++p
) {
1080 f
->open_object_section("extent");
1081 f
->dump_unsigned("offset", p
.get_start());
1082 f
->dump_unsigned("length", p
.get_len());
1088 void bluestore_deferred_transaction_t::generate_test_instances(list
<bluestore_deferred_transaction_t
*>& o
)
1090 o
.push_back(new bluestore_deferred_transaction_t());
1091 o
.push_back(new bluestore_deferred_transaction_t());
1092 o
.back()->seq
= 123;
1093 o
.back()->ops
.push_back(bluestore_deferred_op_t());
1094 o
.back()->ops
.push_back(bluestore_deferred_op_t());
1095 o
.back()->ops
.back().op
= bluestore_deferred_op_t::OP_WRITE
;
1096 o
.back()->ops
.back().extents
.push_back(bluestore_pextent_t(1,7));
1097 o
.back()->ops
.back().data
.append("foodata");
1100 void bluestore_compression_header_t::dump(Formatter
*f
) const
1102 f
->dump_unsigned("type", type
);
1103 f
->dump_unsigned("length", length
);
1106 void bluestore_compression_header_t::generate_test_instances(
1107 list
<bluestore_compression_header_t
*>& o
)
1109 o
.push_back(new bluestore_compression_header_t
);
1110 o
.push_back(new bluestore_compression_header_t(1));
1111 o
.back()->length
= 1234;