#define JEMALLOC_HUGE_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/

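/*
 * Huge allocations are chunk-aligned, and each one is tracked by an
 * extent_node_t registered in the global chunk registry.  The helpers below
 * wrap that registry (chunk_lookup()/chunk_register()/chunk_deregister()),
 * keyed by the allocation's base address.
 */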
static extent_node_t *
huge_node_get(const void *ptr)
{
	extent_node_t *node;

	node = chunk_lookup(ptr, true);
	assert(!extent_node_achunk_get(node));

	return (node);
}

static bool
huge_node_set(const void *ptr, extent_node_t *node)
{

	assert(extent_node_addr_get(node) == ptr);
	assert(!extent_node_achunk_get(node));
	return (chunk_register(ptr, node));
}

static void
huge_node_unset(const void *ptr, const extent_node_t *node)
{

	chunk_deregister(ptr, node);
}

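/*
 * huge_malloc() is a thin wrapper: huge allocations are always chunk-aligned,
 * so it simply forwards to huge_palloc() with the minimum (chunksize)
 * alignment.
 */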
void *
huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,
    tcache_t *tcache)
{

	assert(usize == s2u(usize));

	return (huge_palloc(tsd, arena, usize, chunksize, zero, tcache));
}

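/*
 * Allocation flow: round the request up for alignment, allocate a metadata
 * node, reserve whole chunks from the arena, register the address -> node
 * mapping, link the node into the arena's huge list, then zero/junk fill as
 * required.
 */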
void *
huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
    bool zero, tcache_t *tcache)
{
	void *ret;
	size_t ausize;
	extent_node_t *node;
	bool is_zeroed;

	/* Allocate one or more contiguous chunks for this request. */

	ausize = sa2u(usize, alignment);
	if (unlikely(ausize == 0 || ausize > HUGE_MAXCLASS))
		return (NULL);
	assert(ausize >= chunksize);

	/* Allocate an extent node with which to track the chunk. */
	node = ipallocztm(tsd, CACHELINE_CEILING(sizeof(extent_node_t)),
	    CACHELINE, false, tcache, true, arena);
	if (node == NULL)
		return (NULL);

	/*
	 * Copy zero into is_zeroed and pass the copy to
	 * arena_chunk_alloc_huge(), so that it is possible to make correct
	 * junk/zero fill decisions below.
	 */
	is_zeroed = zero;
	/* ANDROID change */
#if !defined(__LP64__)
	/*
	 * On 32 bit systems, using a per arena cache can exhaust virtual
	 * address space.  Force all huge allocations to always take place in
	 * the first arena.
	 */
	extern arena_t *a0get(void);
	arena = a0get();
#else
	arena = arena_choose(tsd, arena);
#endif
	/* End ANDROID change */
	if (unlikely(arena == NULL) || (ret = arena_chunk_alloc_huge(arena,
	    usize, alignment, &is_zeroed)) == NULL) {
		idalloctm(tsd, node, tcache, true, true);
		return (NULL);
	}

	extent_node_init(node, arena, ret, usize, is_zeroed, true);

	if (huge_node_set(ret, node)) {
		arena_chunk_dalloc_huge(arena, ret, usize);
		idalloctm(tsd, node, tcache, true, true);
		return (NULL);
	}

	/* Insert node into the arena's huge list. */
	malloc_mutex_lock(&arena->huge_mtx);
	ql_elm_new(node, ql_link);
	ql_tail_insert(&arena->huge, node, ql_link);
	malloc_mutex_unlock(&arena->huge_mtx);

	if (zero || (config_fill && unlikely(opt_zero))) {
		if (!is_zeroed)
			memset(ret, 0, usize);
	} else if (config_fill && unlikely(opt_junk_alloc))
		memset(ret, 0xa5, usize);

	arena_decay_tick(tsd, arena);
	return (ret);
}

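/*
 * Under JEMALLOC_JET the junk-fill hook is exported through a mutable
 * function pointer (huge_dalloc_junk) so that tests can interpose on it; in
 * normal builds the #define machinery below collapses to a plain static
 * function.
 */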
#ifdef JEMALLOC_JET
#undef huge_dalloc_junk
#define huge_dalloc_junk JEMALLOC_N(huge_dalloc_junk_impl)
#endif
static void
huge_dalloc_junk(void *ptr, size_t usize)
{

	if (config_fill && have_dss && unlikely(opt_junk_free)) {
		/*
		 * Only bother junk filling if the chunk isn't about to be
		 * unmapped.
		 */
		if (!config_munmap || (have_dss && chunk_in_dss(ptr)))
			memset(ptr, 0x5a, usize);
	}
}
#ifdef JEMALLOC_JET
#undef huge_dalloc_junk
#define huge_dalloc_junk JEMALLOC_N(huge_dalloc_junk)
huge_dalloc_junk_t *huge_dalloc_junk = JEMALLOC_N(huge_dalloc_junk_impl);
#endif

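/*
 * In-place resize within the same chunk footprint: pick the largest size
 * class that still fits in the existing chunks, junk or purge the tail on
 * shrink, and zero/junk fill the tail on growth.
 */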
static void
huge_ralloc_no_move_similar(void *ptr, size_t oldsize, size_t usize_min,
    size_t usize_max, bool zero)
{
	size_t usize, usize_next;
	extent_node_t *node;
	arena_t *arena;
	chunk_hooks_t chunk_hooks = CHUNK_HOOKS_INITIALIZER;
	bool pre_zeroed, post_zeroed;

	/*
	 * Increase usize to incorporate extra: grow toward usize_max as long
	 * as the next size class still fits within oldsize.
	 */
	for (usize = usize_min; usize < usize_max && (usize_next = s2u(usize+1))
	    <= oldsize; usize = usize_next)
		; /* Do nothing. */

	if (oldsize == usize)
		return;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	pre_zeroed = extent_node_zeroed_get(node);

	/* Fill if necessary (shrinking). */
	if (oldsize > usize) {
		size_t sdiff = oldsize - usize;
		if (config_fill && unlikely(opt_junk_free)) {
			memset((void *)((uintptr_t)ptr + usize), 0x5a, sdiff);
			post_zeroed = false;
		} else {
			post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
			    ptr, CHUNK_CEILING(oldsize), usize, sdiff);
		}
	} else
		post_zeroed = pre_zeroed;

	malloc_mutex_lock(&arena->huge_mtx);
	/* Update the size of the huge allocation. */
	assert(extent_node_size_get(node) != usize);
	extent_node_size_set(node, usize);
	/* Update zeroed. */
	extent_node_zeroed_set(node, post_zeroed);
	malloc_mutex_unlock(&arena->huge_mtx);

	arena_chunk_ralloc_huge_similar(arena, ptr, oldsize, usize);

	/* Fill if necessary (growing). */
	if (oldsize < usize) {
		if (zero || (config_fill && unlikely(opt_zero))) {
			if (!pre_zeroed) {
				memset((void *)((uintptr_t)ptr + oldsize), 0,
				    usize - oldsize);
			}
		} else if (config_fill && unlikely(opt_junk_alloc)) {
			memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
			    oldsize);
		}
	}
}

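/*
 * Shrink in place by splitting the excess whole chunks off the end of the
 * extent, junking or purging the freed tail, then returning the excess chunks
 * to the arena.
 */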
static bool
huge_ralloc_no_move_shrink(void *ptr, size_t oldsize, size_t usize)
{
	extent_node_t *node;
	arena_t *arena;
	chunk_hooks_t chunk_hooks;
	size_t cdiff;
	bool pre_zeroed, post_zeroed;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	pre_zeroed = extent_node_zeroed_get(node);
	chunk_hooks = chunk_hooks_get(arena);

	assert(oldsize > usize);

	/* Split excess chunks. */
	cdiff = CHUNK_CEILING(oldsize) - CHUNK_CEILING(usize);
	if (cdiff != 0 && chunk_hooks.split(ptr, CHUNK_CEILING(oldsize),
	    CHUNK_CEILING(usize), cdiff, true, arena->ind))
		return (true);

	if (oldsize > usize) {
		size_t sdiff = oldsize - usize;
		if (config_fill && unlikely(opt_junk_free)) {
			huge_dalloc_junk((void *)((uintptr_t)ptr + usize),
			    sdiff);
			post_zeroed = false;
		} else {
			post_zeroed = !chunk_purge_wrapper(arena, &chunk_hooks,
			    CHUNK_ADDR2BASE((uintptr_t)ptr + usize),
			    CHUNK_CEILING(oldsize),
			    CHUNK_ADDR2OFFSET((uintptr_t)ptr + usize), sdiff);
		}
	} else
		post_zeroed = pre_zeroed;

	malloc_mutex_lock(&arena->huge_mtx);
	/* Update the size of the huge allocation. */
	extent_node_size_set(node, usize);
	/* Update zeroed. */
	extent_node_zeroed_set(node, post_zeroed);
	malloc_mutex_unlock(&arena->huge_mtx);

	/* Zap the excess chunks. */
	arena_chunk_ralloc_huge_shrink(arena, ptr, oldsize, usize);

	return (false);
}

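/*
 * Grow in place by asking the arena for the chunks immediately following the
 * existing extent.  Zeroing is tracked separately for the tail of the old
 * trailing chunk (is_zeroed_subchunk) and for the newly mapped chunks
 * (is_zeroed_chunk).
 */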
static bool
huge_ralloc_no_move_expand(void *ptr, size_t oldsize, size_t usize, bool zero)
{
	extent_node_t *node;
	arena_t *arena;
	bool is_zeroed_subchunk, is_zeroed_chunk;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	malloc_mutex_lock(&arena->huge_mtx);
	is_zeroed_subchunk = extent_node_zeroed_get(node);
	malloc_mutex_unlock(&arena->huge_mtx);

	/*
	 * Copy zero into is_zeroed_chunk and pass the copy to
	 * arena_chunk_ralloc_huge_expand(), so that it is possible to make
	 * correct junk/zero fill decisions below.
	 */
	is_zeroed_chunk = zero;

	if (arena_chunk_ralloc_huge_expand(arena, ptr, oldsize, usize,
	    &is_zeroed_chunk))
		return (true);

	malloc_mutex_lock(&arena->huge_mtx);
	/* Update the size of the huge allocation. */
	extent_node_size_set(node, usize);
	malloc_mutex_unlock(&arena->huge_mtx);

	if (zero || (config_fill && unlikely(opt_zero))) {
		if (!is_zeroed_subchunk) {
			memset((void *)((uintptr_t)ptr + oldsize), 0,
			    CHUNK_CEILING(oldsize) - oldsize);
		}
		if (!is_zeroed_chunk) {
			memset((void *)((uintptr_t)ptr +
			    CHUNK_CEILING(oldsize)), 0, usize -
			    CHUNK_CEILING(oldsize));
		}
	} else if (config_fill && unlikely(opt_junk_alloc)) {
		memset((void *)((uintptr_t)ptr + oldsize), 0xa5, usize -
		    oldsize);
	}

	return (false);
}

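/*
 * Try to satisfy a resize without moving the allocation: first attempt to
 * expand to usize_max (falling back to usize_min), then resize within the
 * current chunk footprint, and finally shrink by releasing whole chunks.
 * For illustration (assuming the default 2 MiB chunksize): growing a 3 MiB
 * allocation to 3.5 MiB stays within the same two chunks and takes the
 * "similar" path, while growing it to 5 MiB requires mapping a third chunk
 * via the expand path.
 */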
bool
huge_ralloc_no_move(tsd_t *tsd, void *ptr, size_t oldsize, size_t usize_min,
    size_t usize_max, bool zero)
{

	assert(s2u(oldsize) == oldsize);
	/* The following should have been caught by callers. */
	assert(usize_min > 0 && usize_max <= HUGE_MAXCLASS);

	/* Both allocations must be huge to avoid a move. */
	if (oldsize < chunksize || usize_max < chunksize)
		return (true);

	if (CHUNK_CEILING(usize_max) > CHUNK_CEILING(oldsize)) {
		/* Attempt to expand the allocation in-place. */
		if (!huge_ralloc_no_move_expand(ptr, oldsize, usize_max,
		    zero)) {
			arena_decay_tick(tsd, huge_aalloc(ptr));
			return (false);
		}
		/* Try again, this time with usize_min. */
		if (usize_min < usize_max && CHUNK_CEILING(usize_min) >
		    CHUNK_CEILING(oldsize) && !huge_ralloc_no_move_expand(ptr,
		    oldsize, usize_min, zero)) {
			arena_decay_tick(tsd, huge_aalloc(ptr));
			return (false);
		}
	}

	/*
	 * Avoid moving the allocation if the existing chunk size accommodates
	 * the new size.
	 */
	if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize_min)
	    && CHUNK_CEILING(oldsize) <= CHUNK_CEILING(usize_max)) {
		huge_ralloc_no_move_similar(ptr, oldsize, usize_min, usize_max,
		    zero);
		arena_decay_tick(tsd, huge_aalloc(ptr));
		return (false);
	}

	/* Attempt to shrink the allocation in-place. */
	if (CHUNK_CEILING(oldsize) > CHUNK_CEILING(usize_max)) {
		if (!huge_ralloc_no_move_shrink(ptr, oldsize, usize_max)) {
			arena_decay_tick(tsd, huge_aalloc(ptr));
			return (false);
		}
	}
	return (true);
}

static void *
huge_ralloc_move_helper(tsd_t *tsd, arena_t *arena, size_t usize,
    size_t alignment, bool zero, tcache_t *tcache)
{

	if (alignment <= chunksize)
		return (huge_malloc(tsd, arena, usize, zero, tcache));
	return (huge_palloc(tsd, arena, usize, alignment, zero, tcache));
}

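/*
 * Full reallocation: first try every in-place strategy via
 * huge_ralloc_no_move(); only if that fails, allocate a new huge region, copy
 * min(usize, oldsize) bytes, and free the old region.
 */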
void *
huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize, size_t usize,
    size_t alignment, bool zero, tcache_t *tcache)
{
	void *ret;
	size_t copysize;

	/* The following should have been caught by callers. */
	assert(usize > 0 && usize <= HUGE_MAXCLASS);

	/* Try to avoid moving the allocation. */
	if (!huge_ralloc_no_move(tsd, ptr, oldsize, usize, usize, zero))
		return (ptr);

	/*
	 * usize and oldsize are different enough that we need to use a
	 * different size class.  In that case, fall back to allocating new
	 * space and copying.
	 */
	ret = huge_ralloc_move_helper(tsd, arena, usize, alignment, zero,
	    tcache);
	if (ret == NULL)
		return (NULL);

	copysize = (usize < oldsize) ? usize : oldsize;
	memcpy(ret, ptr, copysize);
	isqalloc(tsd, ptr, oldsize, tcache);
	return (ret);
}

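/*
 * Teardown mirrors huge_palloc(): deregister the chunk, unlink the node from
 * the arena's huge list, junk fill, return the chunks to the arena, and free
 * the extent node.
 */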
void
huge_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache)
{
	extent_node_t *node;
	arena_t *arena;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	huge_node_unset(ptr, node);
	malloc_mutex_lock(&arena->huge_mtx);
	ql_remove(&arena->huge, node, ql_link);
	malloc_mutex_unlock(&arena->huge_mtx);

	huge_dalloc_junk(extent_node_addr_get(node),
	    extent_node_size_get(node));
	arena_chunk_dalloc_huge(extent_node_arena_get(node),
	    extent_node_addr_get(node), extent_node_size_get(node));
	idalloctm(tsd, node, tcache, true, true);

	arena_decay_tick(tsd, arena);
}

arena_t *
huge_aalloc(const void *ptr)
{

	return (extent_node_arena_get(huge_node_get(ptr)));
}

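/*
 * The size of a huge allocation can change via in-place ralloc, so it is read
 * under the owning arena's huge_mtx.
 */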
size_t
huge_salloc(const void *ptr)
{
	size_t size;
	extent_node_t *node;
	arena_t *arena;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	malloc_mutex_lock(&arena->huge_mtx);
	size = extent_node_size_get(node);
	malloc_mutex_unlock(&arena->huge_mtx);

	return (size);
}

prof_tctx_t *
huge_prof_tctx_get(const void *ptr)
{
	prof_tctx_t *tctx;
	extent_node_t *node;
	arena_t *arena;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	malloc_mutex_lock(&arena->huge_mtx);
	tctx = extent_node_prof_tctx_get(node);
	malloc_mutex_unlock(&arena->huge_mtx);

	return (tctx);
}

void
huge_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
{
	extent_node_t *node;
	arena_t *arena;

	node = huge_node_get(ptr);
	arena = extent_node_arena_get(node);
	malloc_mutex_lock(&arena->huge_mtx);
	extent_node_prof_tctx_set(node, tctx);
	malloc_mutex_unlock(&arena->huge_mtx);
}

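/*
 * (prof_tctx_t *)(uintptr_t)1U is the sentinel value the heap profiler uses
 * to mark an allocation as not sampled.
 */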
void
huge_prof_tctx_reset(const void *ptr)
{

	huge_prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
}