diff options
author | Christopher Ferris <cferris@google.com> | 2019-05-03 07:34:57 -0700 |
---|---|---|
committer | android-build-merger <android-build-merger@google.com> | 2019-05-03 07:34:57 -0700 |
commit | 44924f24a547101d4476edd1db2fdaa56cdf3e91 (patch) | |
tree | 5d0471227f485ce69c3df9e65b93e1186ad9fee8 | |
parent | 3a11273af2ec269a2861a75d932dc75d91fe28bb (diff) | |
parent | 158f084de4880d7241a9cd9c4d0e5571e9ea6a2b (diff) | |
download | platform_external_jemalloc_new-android10-qpr1-release.tar.gz | platform_external_jemalloc_new-android10-qpr1-release.tar.bz2 | platform_external_jemalloc_new-android10-qpr1-release.zip |
Fix miscalculating large memory allocations.
Tags: android-10.0.0_r29, android-10.0.0_r28, android-10.0.0_r27, android-10.0.0_r26, android-10.0.0_r25, android-10.0.0_r24, android-10.0.0_r23, android-10.0.0_r22, android-10.0.0_r21, android-10.0.0_r20, android-10.0.0_r19, android-10.0.0_r18, android-10.0.0_r16, android-10.0.0_r15, android10-qpr1-release, android10-qpr1-d-release, android10-qpr1-c-s1-release, android10-qpr1-c-release, android10-qpr1-b-s1-release, android10-qpr1-b-release
am: 158f084de4
Change-Id: Iceba2489e1d528b98713bb3551ddf037303f0a7d
-rw-r--r-- | src/android_je_mallinfo.c | 73 |
1 file changed, 39 insertions(+), 34 deletions(-)
diff --git a/src/android_je_mallinfo.c b/src/android_je_mallinfo.c index 8a7ff232..53bf6644 100644 --- a/src/android_je_mallinfo.c +++ b/src/android_je_mallinfo.c @@ -14,6 +14,37 @@ * limitations under the License. */ +static size_t accumulate_large_allocs(arena_t* arena) { + size_t total_bytes = 0; + + /* Accumulate the large allocation stats. + * Do not include stats.allocated_large, it is only updated by + * arena_stats_merge, and would include the data counted below. + */ + for (unsigned j = 0; j < NSIZES - NBINS; j++) { + /* Read ndalloc first so that we guarantee nmalloc >= ndalloc. */ + uint64_t ndalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].ndalloc); + uint64_t nmalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].nmalloc); + size_t allocs = (size_t)(nmalloc - ndalloc); + total_bytes += sz_index2size(NBINS + j) * allocs; + } + return total_bytes; +} + +static size_t accumulate_small_allocs(arena_t* arena) { + size_t total_bytes = 0; + for (unsigned j = 0; j < NBINS; j++) { + bin_t* bin = &arena->bins[j]; + + /* NOTE: This includes allocations cached on every thread. */ + malloc_mutex_lock(TSDN_NULL, &bin->lock); + total_bytes += bin_infos[j].reg_size * bin->stats.curregs; + malloc_mutex_unlock(TSDN_NULL, &bin->lock); + } + return total_bytes; +} + + /* Only use bin locks since the stats are now all atomic and can be read * without taking the stats lock. */ @@ -27,27 +58,8 @@ struct mallinfo je_mallinfo() { if (arena != NULL) { mi.hblkhd += atomic_load_zu(&arena->stats.mapped, ATOMIC_ACQUIRE); - /* Accumulate the small bins. */ - for (unsigned j = 0; j < NBINS; j++) { - bin_t* bin = &arena->bins[j]; - - /* NOTE: This includes allocations cached on every thread. */ - malloc_mutex_lock(TSDN_NULL, &bin->lock); - mi.uordblks += bin_infos[j].reg_size * bin->stats.curregs; - malloc_mutex_unlock(TSDN_NULL, &bin->lock); - } - - /* Accumulate the large allocation stats. 
- * Do not include stats.allocated_large, it is only updated by - * arena_stats_merge, and would include the data counted below. - */ - for (unsigned j = 0; j < NSIZES - NBINS; j++) { - /* Read ndalloc first so that we guarantee nmalloc >= ndalloc. */ - uint64_t ndalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].ndalloc); - uint64_t nmalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].nmalloc); - size_t allocs = (size_t)(nmalloc - ndalloc); - mi.uordblks += sz_index2size(NBINS + j) * allocs; - } + mi.uordblks += accumulate_small_allocs(arena); + mi.uordblks += accumulate_large_allocs(arena); } } malloc_mutex_unlock(TSDN_NULL, &arenas_lock); @@ -56,15 +68,15 @@ struct mallinfo je_mallinfo() { return mi; } -size_t __mallinfo_narenas() { +size_t je_mallinfo_narenas() { return narenas_auto; } -size_t __mallinfo_nbins() { +size_t je_mallinfo_nbins() { return NBINS; } -struct mallinfo __mallinfo_arena_info(size_t aidx) { +struct mallinfo je_mallinfo_arena_info(size_t aidx) { struct mallinfo mi; memset(&mi, 0, sizeof(mi)); @@ -73,22 +85,15 @@ struct mallinfo __mallinfo_arena_info(size_t aidx) { arena_t* arena = atomic_load_p(&arenas[aidx], ATOMIC_ACQUIRE); if (arena != NULL) { mi.hblkhd = atomic_load_zu(&arena->stats.mapped, ATOMIC_ACQUIRE); - mi.ordblks = atomic_load_zu(&arena->stats.allocated_large, ATOMIC_ACQUIRE); - - for (unsigned j = 0; j < NBINS; j++) { - bin_t* bin = &arena->bins[j]; - - malloc_mutex_lock(TSDN_NULL, &bin->lock); - mi.fsmblks += bin_infos[j].reg_size * bin->stats.curregs; - malloc_mutex_unlock(TSDN_NULL, &bin->lock); - } + mi.ordblks = accumulate_large_allocs(arena); + mi.fsmblks = accumulate_small_allocs(arena); } } malloc_mutex_unlock(TSDN_NULL, &arenas_lock); return mi; } -struct mallinfo __mallinfo_bin_info(size_t aidx, size_t bidx) { +struct mallinfo je_mallinfo_bin_info(size_t aidx, size_t bidx) { struct mallinfo mi; memset(&mi, 0, sizeof(mi)); |