1 #include "test/jemalloc_test.h"
/*
 * Dump-open hook installed for the whole test run: redirect heap profile
 * dumps to /dev/null so that exercising "prof.dump" does not litter the
 * filesystem.  propagate_err/filename are part of the hook signature and
 * intentionally unused here.
 */
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}
/* Toggle the "prof.active" mallctl; aborts the test on failure. */
static void
set_prof_active(bool active)
{

	assert_d_eq(mallctl("prof.active", NULL, NULL, (void *)&active,
	    sizeof(active)), 0, "Unexpected mallctl failure");
}
/* Read the current "prof.lg_sample" (lg of the profiling sample rate). */
static size_t
get_lg_prof_sample(void)
{
	size_t lg_prof_sample;
	size_t sz = sizeof(size_t);

	assert_d_eq(mallctl("prof.lg_sample", (void *)&lg_prof_sample, &sz,
	    NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");
	return (lg_prof_sample);
}
/*
 * Reset profile data via "prof.reset", simultaneously setting the sample
 * rate to lg_prof_sample, then verify the rate actually changed.
 */
static void
do_prof_reset(size_t lg_prof_sample)
{
	assert_d_eq(mallctl("prof.reset", NULL, NULL,
	    (void *)&lg_prof_sample, sizeof(size_t)), 0,
	    "Unexpected mallctl failure while resetting profile data");
	assert_zu_eq(lg_prof_sample, get_lg_prof_sample(),
	    "Expected profile sample rate change");
}
44 TEST_BEGIN(test_prof_reset_basic
)
46 size_t lg_prof_sample_orig
, lg_prof_sample
, lg_prof_sample_next
;
50 test_skip_if(!config_prof
);
53 assert_d_eq(mallctl("opt.lg_prof_sample", (void *)&lg_prof_sample_orig
,
55 "Unexpected mallctl failure while reading profiling sample rate");
56 assert_zu_eq(lg_prof_sample_orig
, 0,
57 "Unexpected profiling sample rate");
58 lg_prof_sample
= get_lg_prof_sample();
59 assert_zu_eq(lg_prof_sample_orig
, lg_prof_sample
,
60 "Unexpected disagreement between \"opt.lg_prof_sample\" and "
61 "\"prof.lg_sample\"");
63 /* Test simple resets. */
64 for (i
= 0; i
< 2; i
++) {
65 assert_d_eq(mallctl("prof.reset", NULL
, NULL
, NULL
, 0), 0,
66 "Unexpected mallctl failure while resetting profile data");
67 lg_prof_sample
= get_lg_prof_sample();
68 assert_zu_eq(lg_prof_sample_orig
, lg_prof_sample
,
69 "Unexpected profile sample rate change");
72 /* Test resets with prof.lg_sample changes. */
73 lg_prof_sample_next
= 1;
74 for (i
= 0; i
< 2; i
++) {
75 do_prof_reset(lg_prof_sample_next
);
76 lg_prof_sample
= get_lg_prof_sample();
77 assert_zu_eq(lg_prof_sample
, lg_prof_sample_next
,
78 "Expected profile sample rate change");
79 lg_prof_sample_next
= lg_prof_sample_orig
;
82 /* Make sure the test code restored prof.lg_sample. */
83 lg_prof_sample
= get_lg_prof_sample();
84 assert_zu_eq(lg_prof_sample_orig
, lg_prof_sample
,
85 "Unexpected disagreement between \"opt.lg_prof_sample\" and "
86 "\"prof.lg_sample\"");
90 bool prof_dump_header_intercepted
= false;
91 prof_cnt_t cnt_all_copy
= {0, 0, 0, 0};
93 prof_dump_header_intercept(tsdn_t
*tsdn
, bool propagate_err
,
94 const prof_cnt_t
*cnt_all
)
97 prof_dump_header_intercepted
= true;
98 memcpy(&cnt_all_copy
, cnt_all
, sizeof(prof_cnt_t
));
103 TEST_BEGIN(test_prof_reset_cleanup
)
106 prof_dump_header_t
*prof_dump_header_orig
;
108 test_skip_if(!config_prof
);
110 set_prof_active(true);
112 assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
114 assert_ptr_not_null(p
, "Unexpected mallocx() failure");
115 assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");
117 prof_dump_header_orig
= prof_dump_header
;
118 prof_dump_header
= prof_dump_header_intercept
;
119 assert_false(prof_dump_header_intercepted
, "Unexpected intercept");
121 assert_d_eq(mallctl("prof.dump", NULL
, NULL
, NULL
, 0),
122 0, "Unexpected error while dumping heap profile");
123 assert_true(prof_dump_header_intercepted
, "Expected intercept");
124 assert_u64_eq(cnt_all_copy
.curobjs
, 1, "Expected 1 allocation");
126 assert_d_eq(mallctl("prof.reset", NULL
, NULL
, NULL
, 0), 0,
127 "Unexpected error while resetting heap profile data");
128 assert_d_eq(mallctl("prof.dump", NULL
, NULL
, NULL
, 0),
129 0, "Unexpected error while dumping heap profile");
130 assert_u64_eq(cnt_all_copy
.curobjs
, 0, "Expected 0 allocations");
131 assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");
133 prof_dump_header
= prof_dump_header_orig
;
136 assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
138 set_prof_active(false);
/* Parameters for the multi-threaded reset stress test below. */
/* NOTE(review): #define NTHREADS was dropped from this extraction; it is
 * required by test_prof_reset (thds[NTHREADS] etc.) — restored here. */
#define	NTHREADS		4
#define	NALLOCS_PER_THREAD	(1U << 13)
#define	OBJ_RING_BUF_COUNT	1531
#define	RESET_INTERVAL		(1U << 10)
#define	DUMP_INTERVAL		3677
148 thd_start(void *varg
)
150 unsigned thd_ind
= *(unsigned *)varg
;
152 void *objs
[OBJ_RING_BUF_COUNT
];
154 memset(objs
, 0, sizeof(objs
));
156 for (i
= 0; i
< NALLOCS_PER_THREAD
; i
++) {
157 if (i
% RESET_INTERVAL
== 0) {
158 assert_d_eq(mallctl("prof.reset", NULL
, NULL
, NULL
, 0),
159 0, "Unexpected error while resetting heap profile "
163 if (i
% DUMP_INTERVAL
== 0) {
164 assert_d_eq(mallctl("prof.dump", NULL
, NULL
, NULL
, 0),
165 0, "Unexpected error while dumping heap profile");
169 void **pp
= &objs
[i
% OBJ_RING_BUF_COUNT
];
174 *pp
= btalloc(1, thd_ind
*NALLOCS_PER_THREAD
+ i
);
175 assert_ptr_not_null(*pp
,
176 "Unexpected btalloc() failure");
180 /* Clean up any remaining objects. */
181 for (i
= 0; i
< OBJ_RING_BUF_COUNT
; i
++) {
182 void **pp
= &objs
[i
% OBJ_RING_BUF_COUNT
];
192 TEST_BEGIN(test_prof_reset
)
194 size_t lg_prof_sample_orig
;
195 thd_t thds
[NTHREADS
];
196 unsigned thd_args
[NTHREADS
];
198 size_t bt_count
, tdata_count
;
200 test_skip_if(!config_prof
);
202 bt_count
= prof_bt_count();
203 assert_zu_eq(bt_count
, 0,
204 "Unexpected pre-existing tdata structures");
205 tdata_count
= prof_tdata_count();
207 lg_prof_sample_orig
= get_lg_prof_sample();
210 set_prof_active(true);
212 for (i
= 0; i
< NTHREADS
; i
++) {
214 thd_create(&thds
[i
], thd_start
, (void *)&thd_args
[i
]);
216 for (i
= 0; i
< NTHREADS
; i
++)
217 thd_join(thds
[i
], NULL
);
219 assert_zu_eq(prof_bt_count(), bt_count
,
220 "Unexpected bactrace count change");
221 assert_zu_eq(prof_tdata_count(), tdata_count
,
222 "Unexpected remaining tdata structures");
224 set_prof_active(false);
226 do_prof_reset(lg_prof_sample_orig
);
/* NOTE(review): the #undefs for NTHREADS and DUMP_INTERVAL were dropped
 * from this extraction; restored so the macros do not leak past the test. */
#undef NTHREADS
#undef NALLOCS_PER_THREAD
#undef OBJ_RING_BUF_COUNT
#undef RESET_INTERVAL
#undef DUMP_INTERVAL
235 /* Test sampling at the same allocation site across resets. */
237 TEST_BEGIN(test_xallocx
)
239 size_t lg_prof_sample_orig
;
243 test_skip_if(!config_prof
);
245 lg_prof_sample_orig
= get_lg_prof_sample();
246 set_prof_active(true);
248 /* Reset profiling. */
251 for (i
= 0; i
< NITER
; i
++) {
255 /* Reset profiling. */
258 /* Allocate small object (which will be promoted). */
259 p
= ptrs
[i
] = mallocx(1, 0);
260 assert_ptr_not_null(p
, "Unexpected mallocx() failure");
262 /* Reset profiling. */
265 /* Perform successful xallocx(). */
267 assert_zu_eq(xallocx(p
, sz
, 0, 0), sz
,
268 "Unexpected xallocx() failure");
270 /* Perform unsuccessful xallocx(). */
271 nsz
= nallocx(sz
+1, 0);
272 assert_zu_eq(xallocx(p
, nsz
, 0, 0), sz
,
273 "Unexpected xallocx() success");
276 for (i
= 0; i
< NITER
; i
++) {
281 set_prof_active(false);
282 do_prof_reset(lg_prof_sample_orig
);
291 /* Intercept dumping prior to running any tests. */
292 prof_dump_open
= prof_dump_open_intercept
;
295 test_prof_reset_basic
,
296 test_prof_reset_cleanup
,