about summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
Diffstat (limited to 'include')
-rw-r--r--include/jemalloc/internal/arena.h4
-rw-r--r--include/jemalloc/internal/jemalloc_internal.h.in28
-rw-r--r--include/jemalloc/internal/private_symbols.txt2
3 files changed, 9 insertions, 25 deletions
diff --git a/include/jemalloc/internal/arena.h b/include/jemalloc/internal/arena.h
index 4e20af48..ce9d8b5e 100644
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -212,8 +212,8 @@ struct arena_s {
* Heaps of extents that were previously allocated. These are used when
* allocating extents, in an attempt to re-use address space.
*/
- extent_heap_t extents_cached[NPSIZES];
- extent_heap_t extents_retained[NPSIZES];
+ extent_heap_t extents_cached[NPSIZES+1];
+ extent_heap_t extents_retained[NPSIZES+1];
/*
* Ring sentinel used to track unused dirty memory. Dirty memory is
* managed as an LRU of cached extents.
diff --git a/include/jemalloc/internal/jemalloc_internal.h.in b/include/jemalloc/internal/jemalloc_internal.h.in
index 0e4ffd91..85b34012 100644
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -434,7 +434,7 @@ extern arena_t **arenas;
* pind2sz_tab encodes the same information as could be computed by
* pind2sz_compute().
*/
-extern size_t const pind2sz_tab[NPSIZES];
+extern size_t const pind2sz_tab[NPSIZES+1];
/*
* index2size_tab encodes the same information as could be computed (at
* unacceptable cost in some code paths) by index2size_compute().
@@ -516,9 +516,7 @@ void jemalloc_postfork_child(void);
#include "jemalloc/internal/large.h"
#ifndef JEMALLOC_ENABLE_INLINE
-pszind_t psz2ind_impl(size_t psz, bool clamp);
pszind_t psz2ind(size_t psz);
-pszind_t psz2ind_clamp(size_t psz);
size_t pind2sz_compute(pszind_t pind);
size_t pind2sz_lookup(pszind_t pind);
size_t pind2sz(pszind_t pind);
@@ -544,11 +542,11 @@ ticker_t *decay_ticker_get(tsd_t *tsd, unsigned ind);
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
JEMALLOC_ALWAYS_INLINE pszind_t
-psz2ind_impl(size_t psz, bool clamp)
+psz2ind(size_t psz)
{
if (unlikely(psz > LARGE_MAXCLASS))
- return (clamp ? NPSIZES-1 : NPSIZES);
+ return (NPSIZES);
{
pszind_t x = lg_floor((psz<<1)-1);
pszind_t shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 : x -
@@ -567,24 +565,12 @@ psz2ind_impl(size_t psz, bool clamp)
}
}
-JEMALLOC_INLINE pszind_t
-psz2ind(size_t psz)
-{
-
- return (psz2ind_impl(psz, false));
-}
-
-JEMALLOC_INLINE pszind_t
-psz2ind_clamp(size_t psz)
-{
-
- return (psz2ind_impl(psz, true));
-}
-
JEMALLOC_INLINE size_t
pind2sz_compute(pszind_t pind)
{
+ if (unlikely(pind == NPSIZES))
+ return (LARGE_MAXCLASS + PAGE);
{
size_t grp = pind >> LG_SIZE_CLASS_GROUP;
size_t mod = pind & ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);
@@ -614,7 +600,7 @@ JEMALLOC_INLINE size_t
pind2sz(pszind_t pind)
{
- assert(pind < NPSIZES);
+ assert(pind < NPSIZES+1);
return (pind2sz_lookup(pind));
}
@@ -623,7 +609,7 @@ psz2u(size_t psz)
{
if (unlikely(psz > LARGE_MAXCLASS))
- return (0);
+ return (LARGE_MAXCLASS + PAGE);
{
size_t x = lg_floor((psz<<1)-1);
size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
diff --git a/include/jemalloc/internal/private_symbols.txt b/include/jemalloc/internal/private_symbols.txt
index 2e2c11d8..2949de10 100644
--- a/include/jemalloc/internal/private_symbols.txt
+++ b/include/jemalloc/internal/private_symbols.txt
@@ -380,8 +380,6 @@ prof_thread_active_set
prof_thread_name_get
prof_thread_name_set
psz2ind
-psz2ind_clamp
-psz2ind_impl
psz2u
rtree_child_read
rtree_child_read_hard