--- a/memory/mozjemalloc/mozjemalloc.cpp
+++ b/memory/mozjemalloc/mozjemalloc.cpp
@@ -1072,16 +1072,21 @@ static __thread arena_t *arenas_map;
#endif
/*******************************/
/*
* Runtime configuration options.
*/
const char *_malloc_options = MOZ_MALLOC_OPTIONS;
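+/* Fill patterns (named here as a brief gloss on the constants below): newly
+ * allocated memory is junk-filled with kAllocJunk when opt_junk is set, and
+ * freed memory is poisoned with kAllocPoison when opt_poison is set. */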
+const uint8_t kAllocJunk = 0xe4;
+const uint8_t kAllocPoison = 0xe5;
+
#ifndef MALLOC_PRODUCTION
static bool opt_abort = true;
static bool opt_junk = true;
static bool opt_poison = true;
static bool opt_zero = false;
#else
static bool opt_abort = false;
static const bool opt_junk = false;
@@ -3570,17 +3573,17 @@ arena_malloc_small(arena_t *arena, size_
bin->stats.nrequests++;
arena->stats.nmalloc_small++;
arena->stats.allocated_small += size;
malloc_spin_unlock(&arena->lock);
if (zero == false) {
if (opt_junk)
- memset(ret, 0xe4, size);
+ memset(ret, kAllocJunk, size);
else if (opt_zero)
memset(ret, 0, size);
} else
memset(ret, 0, size);
return (ret);
}
@@ -3598,17 +3601,17 @@ arena_malloc_large(arena_t *arena, size_
return (NULL);
}
arena->stats.nmalloc_large++;
arena->stats.allocated_large += size;
malloc_spin_unlock(&arena->lock);
if (zero == false) {
if (opt_junk)
- memset(ret, 0xe4, size);
+ memset(ret, kAllocJunk, size);
else if (opt_zero)
memset(ret, 0, size);
}
return (ret);
}
static inline void *
@@ -3692,17 +3695,17 @@ arena_palloc(arena_t *arena, size_t alig
}
}
arena->stats.nmalloc_large++;
arena->stats.allocated_large += size;
malloc_spin_unlock(&arena->lock);
if (opt_junk)
- memset(ret, 0xe4, size);
+ memset(ret, kAllocJunk, size);
else if (opt_zero)
memset(ret, 0, size);
return (ret);
}
static inline void *
ipalloc(size_t alignment, size_t size)
{
@@ -3909,17 +3912,17 @@ arena_dalloc_small(arena_t *arena, arena
size_t size;
run = (arena_run_t *)(mapelm->bits & ~pagesize_mask);
RELEASE_ASSERT(run->magic == ARENA_RUN_MAGIC);
bin = run->bin;
size = bin->reg_size;
if (opt_poison)
- memset(ptr, 0xe5, size);
+ memset(ptr, kAllocPoison, size);
arena_run_reg_dalloc(run, bin, ptr, size);
run->nfree++;
if (run->nfree == bin->nregs) {
/* Deallocate run. */
if (run == bin->runcur)
bin->runcur = NULL;
@@ -3985,17 +3988,17 @@ arena_dalloc_small(arena_t *arena, arena
static void
arena_dalloc_large(arena_t *arena, arena_chunk_t *chunk, void *ptr)
{
size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
pagesize_2pow;
size_t size = chunk->map[pageind].bits & ~pagesize_mask;
if (opt_poison)
- memset(ptr, 0xe5, size);
+ memset(ptr, kAllocPoison, size);
arena->stats.allocated_large -= size;
arena->stats.ndalloc_large++;
arena_run_dalloc(arena, (arena_run_t *)ptr, true);
}
static inline void
arena_dalloc(void *ptr, size_t offset)
@@ -4106,32 +4109,32 @@ static bool
arena_ralloc_large(void *ptr, size_t size, size_t oldsize)
{
size_t psize;
psize = PAGE_CEILING(size);
if (psize == oldsize) {
/* Same size class. */
if (opt_poison && size < oldsize) {
- memset((void *)((uintptr_t)ptr + size), 0xe5, oldsize -
+ memset((void *)((uintptr_t)ptr + size), kAllocPoison, oldsize -
size);
}
return (false);
} else {
arena_chunk_t *chunk;
arena_t *arena;
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
arena = chunk->arena;
RELEASE_ASSERT(arena->magic == ARENA_MAGIC);
if (psize < oldsize) {
/* Fill before shrinking in order to avoid a race. */
if (opt_poison) {
- memset((void *)((uintptr_t)ptr + size), 0xe5,
+ memset((void *)((uintptr_t)ptr + size), kAllocPoison,
oldsize - size);
}
arena_ralloc_large_shrink(arena, chunk, ptr, psize,
oldsize);
return (false);
} else {
bool ret = arena_ralloc_large_grow(arena, chunk, ptr,
psize, oldsize);
@@ -4187,17 +4190,17 @@ arena_ralloc(void *ptr, size_t size, siz
pages_copy(ret, ptr, copysize);
else
#endif
memcpy(ret, ptr, copysize);
idalloc(ptr);
return (ret);
IN_PLACE:
if (opt_poison && size < oldsize)
- memset((void *)((uintptr_t)ptr + size), 0xe5, oldsize - size);
+ memset((void *)((uintptr_t)ptr + size), kAllocPoison, oldsize - size);
else if (opt_zero && size > oldsize)
memset((void *)((uintptr_t)ptr + oldsize), 0, size - oldsize);
return (ptr);
}
static inline void *
iralloc(void *ptr, size_t size)
{
@@ -4430,19 +4433,19 @@ huge_palloc(size_t size, size_t alignmen
#ifdef MALLOC_DECOMMIT
if (csize - psize > 0)
pages_decommit((void *)((uintptr_t)ret + psize), csize - psize);
#endif
if (zero == false) {
if (opt_junk)
# ifdef MALLOC_DECOMMIT
- memset(ret, 0xe4, psize);
+ memset(ret, kAllocJunk, psize);
# else
- memset(ret, 0xe4, csize);
+ memset(ret, kAllocJunk, csize);
# endif
else if (opt_zero)
# ifdef MALLOC_DECOMMIT
memset(ret, 0, psize);
# else
memset(ret, 0, csize);
# endif
}
@@ -4457,17 +4460,17 @@ huge_ralloc(void *ptr, size_t size, size
size_t copysize;
/* Avoid moving the allocation if the size class would not change. */
if (oldsize > arena_maxclass &&
CHUNK_CEILING(size) == CHUNK_CEILING(oldsize)) {
size_t psize = PAGE_CEILING(size);
if (opt_poison && size < oldsize) {
- memset((void *)((uintptr_t)ptr + size), 0xe5, oldsize
+ memset((void *)((uintptr_t)ptr + size), kAllocPoison, oldsize
- size);
}
#ifdef MALLOC_DECOMMIT
if (psize < oldsize) {
extent_node_t *node, key;
pages_decommit((void *)((uintptr_t)ptr + psize),
oldsize - psize);