#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
const char *malloc_conf = "junk:false";
#endif

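/*
 * Read an unsigned count (e.g. the number of huge size classes via
 * "arenas.nhchunks") through mallctl(), aborting the test on failure.
 */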
static unsigned
get_nsizes_impl(const char *cmd)
{
	unsigned ret;
	size_t z;

	z = sizeof(unsigned);
	assert_d_eq(mallctl(cmd, (void *)&ret, &z, NULL, 0), 0,
	    "Unexpected mallctl(\"%s\", ...) failure", cmd);

	return (ret);
}

static unsigned
get_nhuge(void)
{

	return (get_nsizes_impl("arenas.nhchunks"));
}

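/*
 * Look up a size class by index: translate the ctl name (e.g.
 * "arenas.hchunk.0.size") to a MIB once, overwrite the index component
 * (mib[2]), then read the size via mallctlbymib().
 */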
static size_t
get_size_impl(const char *cmd, size_t ind)
{
	size_t ret;
	size_t z;
	size_t mib[4];
	size_t miblen = 4;

	z = sizeof(size_t);
	assert_d_eq(mallctlnametomib(cmd, mib, &miblen),
	    0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
	mib[2] = ind;
	z = sizeof(size_t);
	assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0),
	    0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);

	return (ret);
}

static size_t
get_huge_size(size_t ind)
{

	return (get_size_impl("arenas.hchunk.0.size", ind));
}

/*
 * On systems which can't merge extents, tests that call this function generate
 * a lot of dirty memory very quickly.  Purging between cycles mitigates
 * potential OOM on e.g. 32-bit Windows.
 */
static void
purge(void)
{

	assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctl error");
}

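/*
 * Requests beyond the largest huge size class, beyond PTRDIFF_MAX or
 * SIZE_T_MAX, or with an alignment beyond PTRDIFF_MAX must return NULL
 * rather than wrapping around.
 */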
TEST_BEGIN(test_overflow)
{
	size_t hugemax;

	hugemax = get_huge_size(get_nhuge()-1);

	assert_ptr_null(mallocx(hugemax+1, 0),
	    "Expected OOM for mallocx(size=%#zx, 0)", hugemax+1);

	assert_ptr_null(mallocx(ZU(PTRDIFF_MAX)+1, 0),
	    "Expected OOM for mallocx(size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);

	assert_ptr_null(mallocx(SIZE_T_MAX, 0),
	    "Expected OOM for mallocx(size=%#zx, 0)", SIZE_T_MAX);

	assert_ptr_null(mallocx(1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),
	    "Expected OOM for mallocx(size=1, MALLOCX_ALIGN(%#zx))",
	    ZU(PTRDIFF_MAX)+1);
}
TEST_END

TEST_BEGIN(test_oom)
{
	size_t hugemax;
	bool oom;
	void *ptrs[3];
	unsigned i;

	/*
	 * It should be impossible to allocate three objects that each consume
	 * nearly half the virtual address space.
	 */
	hugemax = get_huge_size(get_nhuge()-1);
	oom = false;
	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
		ptrs[i] = mallocx(hugemax, 0);
		if (ptrs[i] == NULL)
			oom = true;
	}
	assert_true(oom,
	    "Expected OOM during series of calls to mallocx(size=%zu, 0)",
	    hugemax);
	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
		if (ptrs[i] != NULL)
			dallocx(ptrs[i], 0);
	}
	purge();

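	/*
	 * Requests whose size is at least half the address space (2^63 on
	 * 64-bit, 2^31 on 32-bit) cannot be satisfied and must return NULL.
	 */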
#if LG_SIZEOF_PTR == 3
	assert_ptr_null(mallocx(0x8000000000000000ULL,
	    MALLOCX_ALIGN(0x8000000000000000ULL)),
	    "Expected OOM for mallocx()");
	assert_ptr_null(mallocx(0x8000000000000000ULL,
	    MALLOCX_ALIGN(0x80000000)),
	    "Expected OOM for mallocx()");
#else
	assert_ptr_null(mallocx(0x80000000UL, MALLOCX_ALIGN(0x80000000UL)),
	    "Expected OOM for mallocx()");
#endif
}
TEST_END

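/*
 * Walk the size classes (via nallocx()) up to MAXSZ, checking that
 * nallocx(), mallocx(), and sallocx() agree on usable size, both with and
 * without MALLOCX_ZERO.
 */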
TEST_BEGIN(test_basic)
{
#define MAXSZ (((size_t)1) << 23)
	size_t sz;

	for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) {
		size_t nsz, rsz;
		void *p;
		nsz = nallocx(sz, 0);
		assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
		p = mallocx(sz, 0);
		assert_ptr_not_null(p,
		    "Unexpected mallocx(size=%zx, flags=0) error", sz);
		rsz = sallocx(p, 0);
		assert_zu_ge(rsz, sz, "Real size smaller than expected");
		assert_zu_eq(nsz, rsz, "nallocx()/sallocx() size mismatch");
		dallocx(p, 0);

		p = mallocx(sz, 0);
		assert_ptr_not_null(p,
		    "Unexpected mallocx(size=%zx, flags=0) error", sz);
		dallocx(p, 0);

		nsz = nallocx(sz, MALLOCX_ZERO);
		assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
		p = mallocx(sz, MALLOCX_ZERO);
		assert_ptr_not_null(p,
		    "Unexpected mallocx(size=%zx, flags=MALLOCX_ZERO) error",
		    nsz);
		rsz = sallocx(p, 0);
		assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
		dallocx(p, 0);
		purge();
	}
#undef MAXSZ
}
TEST_END

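/*
 * For each power-of-two alignment from 8 up to MAXALIGN, allocate a range of
 * sizes with MALLOCX_ALIGN | MALLOCX_ZERO and verify the reported size and
 * the alignment of the returned pointers.
 */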
TEST_BEGIN(test_alignment_and_size)
{
#define MAXALIGN (((size_t)1) << 23)
#define NITER 4
	size_t nsz, rsz, sz, alignment, total;
	unsigned i;
	void *ps[NITER];

	for (i = 0; i < NITER; i++)
		ps[i] = NULL;

	for (alignment = 8;
	    alignment <= MAXALIGN;
	    alignment <<= 1) {
		total = 0;
		for (sz = 1;
		    sz < 3 * alignment && sz < (1U << 31);
		    sz += (alignment >> (LG_SIZEOF_PTR-1)) - 1) {
			for (i = 0; i < NITER; i++) {
				nsz = nallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_zu_ne(nsz, 0,
				    "nallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_ptr_not_null(ps[i],
				    "mallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				rsz = sallocx(ps[i], 0);
				assert_zu_ge(rsz, sz,
				    "Real size smaller than expected for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_zu_eq(nsz, rsz,
				    "nallocx()/sallocx() size mismatch for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_ptr_null(
				    (void *)((uintptr_t)ps[i] & (alignment-1)),
				    "%p inadequately aligned for"
				    " alignment=%zu, size=%zu", ps[i],
				    alignment, sz);
				total += rsz;
				if (total >= (MAXALIGN << 1))
					break;
			}
			for (i = 0; i < NITER; i++) {
				if (ps[i] != NULL) {
					dallocx(ps[i], 0);
					ps[i] = NULL;
				}
			}
		}
		purge();
	}
#undef MAXALIGN
#undef NITER
}
TEST_END

int
main(void)
{

	return (test(
	    test_overflow,
	    test_oom,
	    test_basic,
	    test_alignment_and_size));
}