]> git.proxmox.com Git - rustc.git/blob - src/jemalloc/test/unit/prof_reset.c
New upstream version 1.22.1+dfsg1
[rustc.git] / src / jemalloc / test / unit / prof_reset.c
1 #include "test/jemalloc_test.h"
2
3 static int
4 prof_dump_open_intercept(bool propagate_err, const char *filename)
5 {
6 int fd;
7
8 fd = open("/dev/null", O_WRONLY);
9 assert_d_ne(fd, -1, "Unexpected open() failure");
10
11 return (fd);
12 }
13
14 static void
15 set_prof_active(bool active)
16 {
17
18 assert_d_eq(mallctl("prof.active", NULL, NULL, (void *)&active,
19 sizeof(active)), 0, "Unexpected mallctl failure");
20 }
21
22 static size_t
23 get_lg_prof_sample(void)
24 {
25 size_t lg_prof_sample;
26 size_t sz = sizeof(size_t);
27
28 assert_d_eq(mallctl("prof.lg_sample", (void *)&lg_prof_sample, &sz,
29 NULL, 0), 0,
30 "Unexpected mallctl failure while reading profiling sample rate");
31 return (lg_prof_sample);
32 }
33
34 static void
35 do_prof_reset(size_t lg_prof_sample)
36 {
37 assert_d_eq(mallctl("prof.reset", NULL, NULL,
38 (void *)&lg_prof_sample, sizeof(size_t)), 0,
39 "Unexpected mallctl failure while resetting profile data");
40 assert_zu_eq(lg_prof_sample, get_lg_prof_sample(),
41 "Expected profile sample rate change");
42 }
43
44 TEST_BEGIN(test_prof_reset_basic)
45 {
46 size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
47 size_t sz;
48 unsigned i;
49
50 test_skip_if(!config_prof);
51
52 sz = sizeof(size_t);
53 assert_d_eq(mallctl("opt.lg_prof_sample", (void *)&lg_prof_sample_orig,
54 &sz, NULL, 0), 0,
55 "Unexpected mallctl failure while reading profiling sample rate");
56 assert_zu_eq(lg_prof_sample_orig, 0,
57 "Unexpected profiling sample rate");
58 lg_prof_sample = get_lg_prof_sample();
59 assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
60 "Unexpected disagreement between \"opt.lg_prof_sample\" and "
61 "\"prof.lg_sample\"");
62
63 /* Test simple resets. */
64 for (i = 0; i < 2; i++) {
65 assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
66 "Unexpected mallctl failure while resetting profile data");
67 lg_prof_sample = get_lg_prof_sample();
68 assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
69 "Unexpected profile sample rate change");
70 }
71
72 /* Test resets with prof.lg_sample changes. */
73 lg_prof_sample_next = 1;
74 for (i = 0; i < 2; i++) {
75 do_prof_reset(lg_prof_sample_next);
76 lg_prof_sample = get_lg_prof_sample();
77 assert_zu_eq(lg_prof_sample, lg_prof_sample_next,
78 "Expected profile sample rate change");
79 lg_prof_sample_next = lg_prof_sample_orig;
80 }
81
82 /* Make sure the test code restored prof.lg_sample. */
83 lg_prof_sample = get_lg_prof_sample();
84 assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
85 "Unexpected disagreement between \"opt.lg_prof_sample\" and "
86 "\"prof.lg_sample\"");
87 }
88 TEST_END
89
90 bool prof_dump_header_intercepted = false;
91 prof_cnt_t cnt_all_copy = {0, 0, 0, 0};
92 static bool
93 prof_dump_header_intercept(tsdn_t *tsdn, bool propagate_err,
94 const prof_cnt_t *cnt_all)
95 {
96
97 prof_dump_header_intercepted = true;
98 memcpy(&cnt_all_copy, cnt_all, sizeof(prof_cnt_t));
99
100 return (false);
101 }
102
/*
 * Verify that "prof.reset" discards accumulated allocation counts: after a
 * reset, a subsequent dump reports zero live objects even though one
 * allocation is still alive, while its backtrace remains cached
 * (prof_bt_count() stays at 1 until the object is freed).
 */
TEST_BEGIN(test_prof_reset_cleanup)
{
	void *p;
	prof_dump_header_t *prof_dump_header_orig;

	test_skip_if(!config_prof);

	set_prof_active(true);

	/* Start from a clean slate, then create a single live allocation. */
	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	/*
	 * Swap in a header hook that records cnt_all rather than writing
	 * dump output; remember the original so it can be restored.
	 */
	prof_dump_header_orig = prof_dump_header;
	prof_dump_header = prof_dump_header_intercept;
	assert_false(prof_dump_header_intercepted, "Unexpected intercept");

	/* First dump: the live allocation must be counted. */
	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_true(prof_dump_header_intercepted, "Expected intercept");
	assert_u64_eq(cnt_all_copy.curobjs, 1, "Expected 1 allocation");

	/*
	 * Reset, then dump again: counts drop to zero, but the backtrace
	 * itself is still cached.
	 */
	assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
	    "Unexpected error while resetting heap profile data");
	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
	    0, "Unexpected error while dumping heap profile");
	assert_u64_eq(cnt_all_copy.curobjs, 0, "Expected 0 allocations");
	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");

	/* Restore the original hook before any other test dumps. */
	prof_dump_header = prof_dump_header_orig;

	dallocx(p, 0);
	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");

	set_prof_active(false);
}
TEST_END
141
/* Parameters for the multi-threaded stress test below. */
#define NTHREADS 4			/* Concurrent allocator threads. */
#define NALLOCS_PER_THREAD (1U << 13)	/* Allocations per thread. */
#define OBJ_RING_BUF_COUNT 1531		/* Live objects kept per thread. */
#define RESET_INTERVAL (1U << 10)	/* Allocations between prof.reset. */
#define DUMP_INTERVAL 3677		/* Allocations between prof.dump. */
147 static void *
148 thd_start(void *varg)
149 {
150 unsigned thd_ind = *(unsigned *)varg;
151 unsigned i;
152 void *objs[OBJ_RING_BUF_COUNT];
153
154 memset(objs, 0, sizeof(objs));
155
156 for (i = 0; i < NALLOCS_PER_THREAD; i++) {
157 if (i % RESET_INTERVAL == 0) {
158 assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0),
159 0, "Unexpected error while resetting heap profile "
160 "data");
161 }
162
163 if (i % DUMP_INTERVAL == 0) {
164 assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
165 0, "Unexpected error while dumping heap profile");
166 }
167
168 {
169 void **pp = &objs[i % OBJ_RING_BUF_COUNT];
170 if (*pp != NULL) {
171 dallocx(*pp, 0);
172 *pp = NULL;
173 }
174 *pp = btalloc(1, thd_ind*NALLOCS_PER_THREAD + i);
175 assert_ptr_not_null(*pp,
176 "Unexpected btalloc() failure");
177 }
178 }
179
180 /* Clean up any remaining objects. */
181 for (i = 0; i < OBJ_RING_BUF_COUNT; i++) {
182 void **pp = &objs[i % OBJ_RING_BUF_COUNT];
183 if (*pp != NULL) {
184 dallocx(*pp, 0);
185 *pp = NULL;
186 }
187 }
188
189 return (NULL);
190 }
191
192 TEST_BEGIN(test_prof_reset)
193 {
194 size_t lg_prof_sample_orig;
195 thd_t thds[NTHREADS];
196 unsigned thd_args[NTHREADS];
197 unsigned i;
198 size_t bt_count, tdata_count;
199
200 test_skip_if(!config_prof);
201
202 bt_count = prof_bt_count();
203 assert_zu_eq(bt_count, 0,
204 "Unexpected pre-existing tdata structures");
205 tdata_count = prof_tdata_count();
206
207 lg_prof_sample_orig = get_lg_prof_sample();
208 do_prof_reset(5);
209
210 set_prof_active(true);
211
212 for (i = 0; i < NTHREADS; i++) {
213 thd_args[i] = i;
214 thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
215 }
216 for (i = 0; i < NTHREADS; i++)
217 thd_join(thds[i], NULL);
218
219 assert_zu_eq(prof_bt_count(), bt_count,
220 "Unexpected bactrace count change");
221 assert_zu_eq(prof_tdata_count(), tdata_count,
222 "Unexpected remaining tdata structures");
223
224 set_prof_active(false);
225
226 do_prof_reset(lg_prof_sample_orig);
227 }
228 TEST_END
229 #undef NTHREADS
230 #undef NALLOCS_PER_THREAD
231 #undef OBJ_RING_BUF_COUNT
232 #undef RESET_INTERVAL
233 #undef DUMP_INTERVAL
234
/*
 * Test sampling at the same allocation site across resets.  Each iteration
 * resets profiling (lg sample rate 0, i.e. the most aggressive sampling
 * setting), allocates, resets again, and then exercises xallocx() on the
 * live object — checking that in-place expansion behaves consistently
 * regardless of when the object was sampled relative to the resets.
 */
#define NITER 10
TEST_BEGIN(test_xallocx)
{
	size_t lg_prof_sample_orig;
	unsigned i;
	void *ptrs[NITER];

	test_skip_if(!config_prof);

	/* Remember the configured rate so it can be restored at the end. */
	lg_prof_sample_orig = get_lg_prof_sample();
	set_prof_active(true);

	/* Reset profiling. */
	do_prof_reset(0);

	for (i = 0; i < NITER; i++) {
		void *p;
		size_t sz, nsz;

		/* Reset profiling. */
		do_prof_reset(0);

		/* Allocate small object (which will be promoted). */
		p = ptrs[i] = mallocx(1, 0);
		assert_ptr_not_null(p, "Unexpected mallocx() failure");

		/* Reset profiling. */
		do_prof_reset(0);

		/* Perform successful xallocx(): same size is a no-op resize. */
		sz = sallocx(p, 0);
		assert_zu_eq(xallocx(p, sz, 0, 0), sz,
		    "Unexpected xallocx() failure");

		/*
		 * Perform unsuccessful xallocx(): growing past the current
		 * size class cannot happen in place, so the old size is
		 * returned.
		 */
		nsz = nallocx(sz+1, 0);
		assert_zu_eq(xallocx(p, nsz, 0, 0), sz,
		    "Unexpected xallocx() success");
	}

	/* Free everything allocated above. */
	for (i = 0; i < NITER; i++) {
		/* dallocx. */
		dallocx(ptrs[i], 0);
	}

	set_prof_active(false);
	do_prof_reset(lg_prof_sample_orig);
}
TEST_END
#undef NITER
286
287 int
288 main(void)
289 {
290
291 /* Intercept dumping prior to running any tests. */
292 prof_dump_open = prof_dump_open_intercept;
293
294 return (test(
295 test_prof_reset_basic,
296 test_prof_reset_cleanup,
297 test_prof_reset,
298 test_xallocx));
299 }