From 08ccc1993359eaeefa82d9f4728d9b81b73b574a Mon Sep 17 00:00:00 2001 From: Christopher Ferris Date: Fri, 16 Nov 2018 12:52:28 -0800 Subject: Fix mallinfo counting for large allocations. The lstats part of the arena structure was not being counted at all, so added that counting for mallinfo. Bug: 119580449 Test: New unit tests pass, without changes, the test fails. Change-Id: I97b231f9189a79f0ce0f55fe6c4cc00266ca75ac --- src/android_je_mallinfo.c | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/android_je_mallinfo.c b/src/android_je_mallinfo.c index 32661c21..8a7ff232 100644 --- a/src/android_je_mallinfo.c +++ b/src/android_je_mallinfo.c @@ -26,15 +26,28 @@ struct mallinfo je_mallinfo() { arena_t* arena = atomic_load_p(&arenas[i], ATOMIC_ACQUIRE); if (arena != NULL) { mi.hblkhd += atomic_load_zu(&arena->stats.mapped, ATOMIC_ACQUIRE); - mi.uordblks += atomic_load_zu(&arena->stats.allocated_large, ATOMIC_ACQUIRE); + /* Accumulate the small bins. */ for (unsigned j = 0; j < NBINS; j++) { bin_t* bin = &arena->bins[j]; + /* NOTE: This includes allocations cached on every thread. */ malloc_mutex_lock(TSDN_NULL, &bin->lock); mi.uordblks += bin_infos[j].reg_size * bin->stats.curregs; malloc_mutex_unlock(TSDN_NULL, &bin->lock); } + + /* Accumulate the large allocation stats. + * Do not include stats.allocated_large, it is only updated by + * arena_stats_merge, and would include the data counted below. + */ + for (unsigned j = 0; j < NSIZES - NBINS; j++) { + /* Read ndalloc first so that we guarantee nmalloc >= ndalloc. */ + uint64_t ndalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].ndalloc); + uint64_t nmalloc = arena_stats_read_u64(TSDN_NULL, &arena->stats, &arena->stats.lstats[j].nmalloc); + size_t allocs = (size_t)(nmalloc - ndalloc); + mi.uordblks += sz_index2size(NBINS + j) * allocs; + } } } malloc_mutex_unlock(TSDN_NULL, &arenas_lock); -- cgit v1.2.3