// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
//
// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.

#include "util/arena.h"
#include "util/random.h"
#include "util/testharness.h"

namespace rocksdb {

namespace {
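// 2 MiB, the typical x86-64 huge page size. Each test below runs twice: once
// with huge pages disabled (huge_page_size == 0) and once with this value.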
const size_t kHugePageSize = 2 * 1024 * 1024;
}  // namespace
class ArenaTest : public testing::Test {};

TEST_F(ArenaTest, Empty) { Arena arena0; }

namespace {
bool CheckMemoryAllocated(size_t allocated, size_t expected) {
  // The value returned by Arena::MemoryAllocatedBytes() may be greater than
  // the requested memory. We choose a somewhat arbitrary upper bound of
  // max_expected = expected * 1.1 to detect critical overallocation.
  size_t max_expected = expected + expected / 10;
  return allocated >= expected && allocated <= max_expected;
}
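
// The tests below pass this helper to gtest's ASSERT_PRED2, which evaluates
// CheckMemoryAllocated(allocated, expected) and, on failure, prints both
// argument values instead of a bare "false". A minimal usage sketch
// (mirroring the assertions below rather than adding coverage):
//
//   Arena arena(32 * 1024);
//   arena.Allocate(12 * 1024);  // > a quarter block: dedicated allocation
//   ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
//                12 * 1024 + Arena::kInlineSize);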

void MemoryAllocatedBytesTest(size_t huge_page_size) {
  const int N = 17;
  size_t req_sz;           // requested size
  size_t bsz = 32 * 1024;  // block size
  size_t expected_memory_allocated;

  Arena arena(bsz, nullptr, huge_page_size);

  // requested size > quarter of a block:
  // allocate requested size separately
  req_sz = 12 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  expected_memory_allocated = req_sz * N + Arena::kInlineSize;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);

  arena.Allocate(Arena::kInlineSize - 1);

  // requested size < quarter of a block:
  // allocate a block with the default size, then try to use the unused part
  // of the block. So one new block will be allocated for the first
  // Allocate(99) call. All the remaining calls won't lead to new allocation.
  req_sz = 99;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  if (huge_page_size) {
    ASSERT_TRUE(
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + bsz) ||
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + huge_page_size));
  } else {
    expected_memory_allocated += bsz;
    ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
                 expected_memory_allocated);
  }

  // requested size > size of a block:
  // allocate requested size separately
  expected_memory_allocated = arena.MemoryAllocatedBytes();
  req_sz = 8 * 1024 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  expected_memory_allocated += req_sz * N;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);
}
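
// Taken together, the three phases above pin down the block policy the
// comments describe: with bsz = 32 KB the quarter-block threshold is 8 KB,
// so the 12 KB and 8 MB requests each get a dedicated allocation of exactly
// the requested size, while the 99-byte requests share a single new 32 KB
// block (or a huge page when one is configured).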

// Make sure we do not count allocated-but-unused memory in
// Arena::ApproximateMemoryUsage().
static void ApproximateMemoryUsageTest(size_t huge_page_size) {
  const size_t kBlockSize = 4096;
  const size_t kEntrySize = kBlockSize / 8;
  const size_t kZero = 0;
  Arena arena(kBlockSize, nullptr, huge_page_size);
  ASSERT_EQ(kZero, arena.ApproximateMemoryUsage());

  // allocate inline bytes
  const size_t kAlignUnit = alignof(max_align_t);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(kAlignUnit);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2 - (2 * kAlignUnit));
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2);
  EXPECT_TRUE(arena.IsInInlineBlock());
  ASSERT_EQ(arena.ApproximateMemoryUsage(), Arena::kInlineSize - kAlignUnit);
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               Arena::kInlineSize);

  auto num_blocks = kBlockSize / kEntrySize;

  // first allocation
  arena.AllocateAligned(kEntrySize);
  EXPECT_FALSE(arena.IsInInlineBlock());
  auto mem_usage = arena.MemoryAllocatedBytes();
  if (huge_page_size) {
    ASSERT_TRUE(
        CheckMemoryAllocated(mem_usage, kBlockSize + Arena::kInlineSize) ||
        CheckMemoryAllocated(mem_usage, huge_page_size + Arena::kInlineSize));
  } else {
    ASSERT_PRED2(CheckMemoryAllocated, mem_usage,
                 kBlockSize + Arena::kInlineSize);
  }
  auto usage = arena.ApproximateMemoryUsage();
  ASSERT_LT(usage, mem_usage);
  for (size_t i = 1; i < num_blocks; ++i) {
    arena.AllocateAligned(kEntrySize);
    ASSERT_EQ(mem_usage, arena.MemoryAllocatedBytes());
    ASSERT_EQ(arena.ApproximateMemoryUsage(), usage + kEntrySize);
    EXPECT_FALSE(arena.IsInInlineBlock());
    usage = arena.ApproximateMemoryUsage();
  }
  if (huge_page_size) {
    ASSERT_TRUE(usage > mem_usage ||
                usage + huge_page_size - kBlockSize == mem_usage);
  } else {
    ASSERT_GT(usage, mem_usage);
  }
}
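
// Note the relationship checked above: MemoryAllocatedBytes() jumps by a
// whole block as soon as the block is reserved, while
// ApproximateMemoryUsage() advances entry by entry, so usage sits strictly
// below the allocated total right after a fresh block is carved out.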
136
137static void SimpleTest(size_t huge_page_size) {
138 std::vector<std::pair<size_t, char*>> allocated;
11fdf7f2 139 Arena arena(Arena::kMinBlockSize, nullptr, huge_page_size);
7c673cae
FG
140 const int N = 100000;
141 size_t bytes = 0;
142 Random rnd(301);
143 for (int i = 0; i < N; i++) {
144 size_t s;
145 if (i % (N / 10) == 0) {
146 s = i;
147 } else {
148 s = rnd.OneIn(4000)
149 ? rnd.Uniform(6000)
150 : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
151 }
152 if (s == 0) {
153 // Our arena disallows size 0 allocations.
154 s = 1;
155 }
156 char* r;
157 if (rnd.OneIn(10)) {
158 r = arena.AllocateAligned(s);
159 } else {
160 r = arena.Allocate(s);
161 }
162
163 for (unsigned int b = 0; b < s; b++) {
164 // Fill the "i"th allocation with a known bit pattern
165 r[b] = i % 256;
166 }
167 bytes += s;
168 allocated.push_back(std::make_pair(s, r));
169 ASSERT_GE(arena.ApproximateMemoryUsage(), bytes);
170 if (i > N / 10) {
171 ASSERT_LE(arena.ApproximateMemoryUsage(), bytes * 1.10);
172 }
173 }
174 for (unsigned int i = 0; i < allocated.size(); i++) {
175 size_t num_bytes = allocated[i].first;
176 const char* p = allocated[i].second;
177 for (unsigned int b = 0; b < num_bytes; b++) {
178 // Check the "i"th allocation for the known bit pattern
179 ASSERT_EQ(int(p[b]) & 0xff, (int)(i % 256));
180 }
181 }
182}
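
// SimpleTest doubles as an overlap check: each allocation is filled with its
// own i % 256 pattern at allocation time and re-read at the end, so
// overlapping allocations would typically clobber an earlier pattern and
// trip the final ASSERT_EQ (unless two colliding allocations happen to share
// the same pattern).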
}  // namespace

TEST_F(ArenaTest, MemoryAllocatedBytes) {
  MemoryAllocatedBytesTest(0);
  MemoryAllocatedBytesTest(kHugePageSize);
}

TEST_F(ArenaTest, ApproximateMemoryUsage) {
  ApproximateMemoryUsageTest(0);
  ApproximateMemoryUsageTest(kHugePageSize);
}

TEST_F(ArenaTest, Simple) {
  SimpleTest(0);
  SimpleTest(kHugePageSize);
}
}  // namespace rocksdb

int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}