]>
git.proxmox.com Git - rustc.git/blob - src/jemalloc/test/unit/pack.c
1 #include "test/jemalloc_test.h"
/*
 * Size class that is a divisor of the page size, ideally 4+ regions per run.
 */
#define	SZ	(ZU(1) << (LG_PAGE - 2))

/*
 * Number of chunks to consume at high water mark.  Should be at least 2 so that
 * if mmap()ed memory grows downward, downward growth of mmap()ed memory is
 * tested.
 */
#define	NCHUNKS	10
26 assert_d_eq(mallctl("arenas.nbins", (void *)&nbins
, &sz
, NULL
, 0), 0,
27 "Unexpected mallctl failure");
29 for (i
= 0; i
< nbins
; i
++) {
31 size_t miblen
= sizeof(mib
)/sizeof(size_t);
34 assert_d_eq(mallctlnametomib("arenas.bin.0.size", mib
,
35 &miblen
), 0, "Unexpected mallctlnametomb failure");
39 assert_d_eq(mallctlbymib(mib
, miblen
, (void *)&size
, &sz
, NULL
,
40 0), 0, "Unexpected mallctlbymib failure");
45 test_fail("Unable to compute nregs_per_run");
50 nregs_per_run_compute(void)
54 unsigned binind
= binind_compute();
56 size_t miblen
= sizeof(mib
)/sizeof(size_t);
58 assert_d_eq(mallctlnametomib("arenas.bin.0.nregs", mib
, &miblen
), 0,
59 "Unexpected mallctlnametomb failure");
60 mib
[2] = (size_t)binind
;
62 assert_d_eq(mallctlbymib(mib
, miblen
, (void *)&nregs
, &sz
, NULL
,
63 0), 0, "Unexpected mallctlbymib failure");
68 npages_per_run_compute(void)
71 unsigned binind
= binind_compute();
73 size_t miblen
= sizeof(mib
)/sizeof(size_t);
76 assert_d_eq(mallctlnametomib("arenas.bin.0.run_size", mib
, &miblen
), 0,
77 "Unexpected mallctlnametomb failure");
78 mib
[2] = (size_t)binind
;
79 sz
= sizeof(run_size
);
80 assert_d_eq(mallctlbymib(mib
, miblen
, (void *)&run_size
, &sz
, NULL
,
81 0), 0, "Unexpected mallctlbymib failure");
82 return (run_size
>> LG_PAGE
);
86 npages_per_chunk_compute(void)
89 return ((chunksize
>> LG_PAGE
) - map_bias
);
93 nruns_per_chunk_compute(void)
96 return (npages_per_chunk_compute() / npages_per_run_compute());
100 arenas_extend_mallctl(void)
105 sz
= sizeof(arena_ind
);
106 assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind
, &sz
, NULL
, 0),
107 0, "Error in arenas.extend");
113 arena_reset_mallctl(unsigned arena_ind
)
116 size_t miblen
= sizeof(mib
)/sizeof(size_t);
118 assert_d_eq(mallctlnametomib("arena.0.reset", mib
, &miblen
), 0,
119 "Unexpected mallctlnametomib() failure");
120 mib
[1] = (size_t)arena_ind
;
121 assert_d_eq(mallctlbymib(mib
, miblen
, NULL
, NULL
, NULL
, 0), 0,
122 "Unexpected mallctlbymib() failure");
125 TEST_BEGIN(test_pack
)
127 unsigned arena_ind
= arenas_extend_mallctl();
128 size_t nregs_per_run
= nregs_per_run_compute();
129 size_t nruns_per_chunk
= nruns_per_chunk_compute();
130 size_t nruns
= nruns_per_chunk
* NCHUNKS
;
131 size_t nregs
= nregs_per_run
* nruns
;
132 VARIABLE_ARRAY(void *, ptrs
, nregs
);
136 for (i
= offset
= 0; i
< nruns
; i
++) {
137 for (j
= 0; j
< nregs_per_run
; j
++) {
138 void *p
= mallocx(SZ
, MALLOCX_ARENA(arena_ind
) |
139 MALLOCX_TCACHE_NONE
);
140 assert_ptr_not_null(p
,
141 "Unexpected mallocx(%zu, MALLOCX_ARENA(%u) |"
142 " MALLOCX_TCACHE_NONE) failure, run=%zu, reg=%zu",
143 SZ
, arena_ind
, i
, j
);
144 ptrs
[(i
* nregs_per_run
) + j
] = p
;
149 * Free all but one region of each run, but rotate which region is
150 * preserved, so that subsequent allocations exercise the within-run
156 i
++, offset
= (offset
+ 1) % nregs_per_run
) {
157 for (j
= 0; j
< nregs_per_run
; j
++) {
158 void *p
= ptrs
[(i
* nregs_per_run
) + j
];
161 dallocx(p
, MALLOCX_ARENA(arena_ind
) |
162 MALLOCX_TCACHE_NONE
);
167 * Logically refill matrix, skipping preserved regions and verifying
168 * that the matrix is unmodified.
173 i
++, offset
= (offset
+ 1) % nregs_per_run
) {
174 for (j
= 0; j
< nregs_per_run
; j
++) {
179 p
= mallocx(SZ
, MALLOCX_ARENA(arena_ind
) |
180 MALLOCX_TCACHE_NONE
);
181 assert_ptr_eq(p
, ptrs
[(i
* nregs_per_run
) + j
],
182 "Unexpected refill discrepancy, run=%zu, reg=%zu\n",
188 arena_reset_mallctl(arena_ind
);