Diffstat (limited to 'runtime/base')
-rw-r--r--  runtime/base/arena_allocator.cc          12
-rw-r--r--  runtime/base/arena_allocator.h           38
-rw-r--r--  runtime/base/iteration_range.h           11
-rw-r--r--  runtime/base/macros.h                      1
-rw-r--r--  runtime/base/scoped_arena_containers.h     6
5 files changed, 65 insertions, 3 deletions
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index b53fa84f5c..8f2d94b564 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -302,6 +302,18 @@ void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
end_ = new_arena->End();
}
+bool ArenaAllocator::Contains(const void* ptr) const {
+ if (ptr >= begin_ && ptr < end_) {
+ return true;
+ }
+ for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
+ if (cur_arena->Contains(ptr)) {
+ return true;
+ }
+ }
+ return false;
+}
+
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
ssize_t lost_bytes_adjustment)
: name_(name),
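The new ArenaAllocator::Contains() first checks the currently active arena via [begin_, end_) and then walks every arena in the arena_head_ chain, so it can answer whether an arbitrary pointer was handed out by this allocator. A minimal caller-side sketch of the intended use; the surrounding class, method, and field names are invented for illustration:

  // Hypothetical debug check: assert that a node really lives in our arenas
  // before keeping a reference to it. 'allocator_' is an ArenaAllocator.
  void Graph::AddBlock(BasicBlock* block) {
    DCHECK(allocator_.Contains(block));  // Checks the active arena, then the chain.
    blocks_.push_back(block);
  }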
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 2e617b500a..d9723b57de 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -142,6 +142,11 @@ class Arena {
return bytes_allocated_;
}
+ // Return true if ptr is contained in the arena.
+ bool Contains(const void* ptr) const {
+ return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
+ }
+
protected:
size_t bytes_allocated_;
uint8_t* memory_;
@@ -219,19 +224,52 @@ class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats
return ret;
}
+ // Realloc never frees the input pointer; it is the caller's job to do so if necessary.
+ void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
+ ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
+ DCHECK_GE(new_size, ptr_size);
+ DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
+ auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
+ // If we haven't allocated anything else, we can safely extend.
+ if (end == ptr_) {
+ const size_t size_delta = new_size - ptr_size;
+ // Check the remaining space in the current arena.
+ const size_t remain = end_ - ptr_;
+ if (remain >= size_delta) {
+ ptr_ += size_delta;
+ ArenaAllocatorStats::RecordAlloc(size_delta, kind);
+ return ptr;
+ }
+ }
+ auto* new_ptr = Alloc(new_size, kind);
+ memcpy(new_ptr, ptr, ptr_size);
+ // TODO: Call free on ptr if linear alloc supports free.
+ return new_ptr;
+ }
+
template <typename T>
T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
return static_cast<T*>(Alloc(length * sizeof(T), kind));
}
void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+
void ObtainNewArenaForAllocation(size_t allocation_size);
+
size_t BytesAllocated() const;
+
MemStats GetMemStats() const;
+
// The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
// TODO: Change BytesAllocated to this behavior?
size_t BytesUsed() const;
+ ArenaPool* GetArenaPool() const {
+ return pool_;
+ }
+
+ bool Contains(const void* ptr) const;
+
private:
static constexpr size_t kAlignment = 8;
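Realloc() only grows in place when the block being resized is the most recent allocation (its end equals the bump pointer ptr_) and the current arena still has new_size - ptr_size bytes free; otherwise it falls back to Alloc() plus memcpy() and, as the comment notes, never frees the old block. A hedged usage sketch for growing an arena-backed array; the helper name and element type are chosen only for illustration:

  // Grow an arena-allocated array of ids from old_count to new_count elements.
  // The old storage is never reclaimed, so callers that resize repeatedly
  // should grow geometrically rather than one element at a time.
  uint32_t* GrowIds(ArenaAllocator* allocator, uint32_t* ids,
                    size_t old_count, size_t new_count) {
    return static_cast<uint32_t*>(
        allocator->Realloc(ids, old_count * sizeof(uint32_t),
                           new_count * sizeof(uint32_t), kArenaAllocMisc));
  }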
diff --git a/runtime/base/iteration_range.h b/runtime/base/iteration_range.h
index 5a46376237..6a0ef1f585 100644
--- a/runtime/base/iteration_range.h
+++ b/runtime/base/iteration_range.h
@@ -17,6 +17,8 @@
#ifndef ART_RUNTIME_BASE_ITERATION_RANGE_H_
#define ART_RUNTIME_BASE_ITERATION_RANGE_H_
+#include <iterator>
+
namespace art {
// Helper class that acts as a container for range-based loops, given an iteration
@@ -38,10 +40,15 @@ class IterationRange {
iterator cend() const { return last_; }
private:
- iterator first_;
- iterator last_;
+ const iterator first_;
+ const iterator last_;
};
+template <typename Iter>
+static inline IterationRange<Iter> MakeIterationRange(const Iter& begin_it, const Iter& end_it) {
+ return IterationRange<Iter>(begin_it, end_it);
+}
+
} // namespace art
#endif // ART_RUNTIME_BASE_ITERATION_RANGE_H_
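MakeIterationRange() only deduces the iterator type, which lets callers hand an arbitrary [begin, end) pair to a range-based for loop without spelling out IterationRange<Iter>. A small sketch of the intended use; the container and bounds are made up for illustration:

  #include <vector>
  std::vector<int> values = {1, 2, 3, 4};
  // Iterate over everything except the first element.
  for (int v : MakeIterationRange(values.begin() + 1, values.end())) {
    // ... use v ...
  }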
diff --git a/runtime/base/macros.h b/runtime/base/macros.h
index c00ae78be8..5c596471c2 100644
--- a/runtime/base/macros.h
+++ b/runtime/base/macros.h
@@ -50,7 +50,6 @@ friend class test_set_name##_##individual_test##_Test
#define ART_FRIEND_TYPED_TEST(test_set_name, individual_test)\
template<typename T> ART_FRIEND_TEST(test_set_name, individual_test)
-
// DISALLOW_COPY_AND_ASSIGN disallows the copy and operator= functions. It goes in the private:
// declarations in a class.
#if !defined(DISALLOW_COPY_AND_ASSIGN)
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index df79085626..82db60e4e4 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -20,6 +20,7 @@
#include <deque>
#include <queue>
#include <set>
+#include <unordered_map>
#include <vector>
#include "arena_containers.h" // For ArenaAllocatorAdapterKind.
@@ -55,6 +56,11 @@ template <typename K, typename V, typename Comparator = std::less<K>>
using ScopedArenaSafeMap =
SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
+template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
+using ScopedArenaUnorderedMap =
+ std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
+
+
// Implementation details below.
template <>
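ScopedArenaUnorderedMap mirrors the existing ScopedArenaSafeMap alias: it plugs the scoped arena adapter into std::unordered_map so the hash table's node storage comes from the arena. A hedged construction sketch, assuming (as in the implementation details of this header) that ScopedArenaAllocator exposes an Adapter() helper producing the adapter; the key/value types are illustrative only:

  // 'allocator' is a ScopedArenaAllocator*.
  ScopedArenaUnorderedMap<uint32_t, const char*> names(allocator->Adapter(kArenaAllocMisc));
  names.emplace(1u, "one");  // Node storage is drawn from the scoped arena.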