author     Nicolas Geoffray <ngeoffray@google.com>    2015-04-21 14:28:41 +0100
committer  Nicolas Geoffray <ngeoffray@google.com>    2015-04-29 18:02:36 +0100
commit     579026039080252878106118645ed70706f4838e (patch)
tree       cfedba53d8e8b04e81b855560e388f3f691ee837 /compiler/optimizing/register_allocator.cc
parent     2d01066db24c19f9384f50ff71806cbb4835c7f9 (diff)
Add synthesized uses at back edges.
This reduces the cost of linearizing the graph (removing the need for a notion of back edges). Since linear scan allocates and spills registers based on the next use, adding a use at each back edge ensures that uses inside the loop are accounted for.

Change-Id: Idaa882cb120edbdd08ca6bff142d326a8245bd14
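For intuition, here is a minimal, self-contained C++ sketch of why the synthesized use matters. It is not ART's code: UsePosition, DistanceToNextUse, and every position constant below are hypothetical, invented only to illustrate the next-use heuristic the commit message refers to. Linear scan prefers to spill the interval whose next use is farthest away; without a use at the back edge, a value whose last real use is early in the loop body looks cold for the rest of the loop and becomes the preferred spill victim, even though the next iteration needs it again.

// A minimal sketch, not ART's implementation: every name and number here
// is hypothetical, chosen only to show the effect of a back-edge use on a
// next-use-based spill heuristic.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct UsePosition {
  size_t position;      // position in the linearized instruction order
  bool is_synthesized;  // a use added at a back edge rather than a real input
};

// Distance from `current` to the next use of an interval; linear scan
// spills the interval whose next use is farthest away.
size_t DistanceToNextUse(const std::vector<UsePosition>& uses, size_t current) {
  for (const UsePosition& use : uses) {  // uses are ordered by position
    if (use.position >= current) {
      return use.position - current;
    }
  }
  return SIZE_MAX;  // no further use: the cheapest interval to spill
}

int main() {
  // A value defined before a loop spanning positions [10, 50), last read
  // inside the loop at position 20, and read again after the loop at 60.
  std::vector<UsePosition> real_uses_only = {{20, false}, {60, false}};
  // The same value with a use synthesized at the back edge (position 50).
  std::vector<UsePosition> with_back_edge_use = {
      {20, false}, {50, true}, {60, false}};

  size_t current = 30;  // allocator is inside the loop, past the last real use
  std::printf("without back-edge use: %zu\n",
              DistanceToNextUse(real_uses_only, current));      // prints 30
  std::printf("with back-edge use:    %zu\n",
              DistanceToNextUse(with_back_edge_use, current));  // prints 20
  return 0;
}

Run as-is, the sketch reports a next-use distance of 30 without the back-edge use and 20 with it: the synthesized use keeps the value looking hot across the whole loop body, so the allocator spills something else instead.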
Diffstat (limited to 'compiler/optimizing/register_allocator.cc')
-rw-r--r--   compiler/optimizing/register_allocator.cc   39
1 file changed, 25 insertions(+), 14 deletions(-)
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index a8d006f104..812642b1b2 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -1467,23 +1467,28 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
     LiveRange* range = current->GetFirstRange();
     while (range != nullptr) {
-      DCHECK(use == nullptr || use->GetPosition() >= range->GetStart());
+      while (use != nullptr && use->GetPosition() < range->GetStart()) {
+        DCHECK(use->IsSynthesized());
+        use = use->GetNext();
+      }
       while (use != nullptr && use->GetPosition() <= range->GetEnd()) {
         DCHECK(!use->GetIsEnvironment());
         DCHECK(current->CoversSlow(use->GetPosition()) || (use->GetPosition() == range->GetEnd()));
-        LocationSummary* locations = use->GetUser()->GetLocations();
-        Location expected_location = locations->InAt(use->GetInputIndex());
-        // The expected (actual) location may be invalid in case the input is unused. Currently
-        // this only happens for intrinsics.
-        if (expected_location.IsValid()) {
-          if (expected_location.IsUnallocated()) {
-            locations->SetInAt(use->GetInputIndex(), source);
-          } else if (!expected_location.IsConstant()) {
-            AddInputMoveFor(interval->GetDefinedBy(), use->GetUser(), source, expected_location);
+        if (!use->IsSynthesized()) {
+          LocationSummary* locations = use->GetUser()->GetLocations();
+          Location expected_location = locations->InAt(use->GetInputIndex());
+          // The expected (actual) location may be invalid in case the input is unused. Currently
+          // this only happens for intrinsics.
+          if (expected_location.IsValid()) {
+            if (expected_location.IsUnallocated()) {
+              locations->SetInAt(use->GetInputIndex(), source);
+            } else if (!expected_location.IsConstant()) {
+              AddInputMoveFor(interval->GetDefinedBy(), use->GetUser(), source, expected_location);
+            }
+          } else {
+            DCHECK(use->GetUser()->IsInvoke());
+            DCHECK(use->GetUser()->AsInvoke()->GetIntrinsic() != Intrinsics::kNone);
           }
-        } else {
-          DCHECK(use->GetUser()->IsInvoke());
-          DCHECK(use->GetUser()->AsInvoke()->GetIntrinsic() != Intrinsics::kNone);
         }
         use = use->GetNext();
       }
@@ -1561,7 +1566,13 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
     current = next_sibling;
   } while (current != nullptr);
-  DCHECK(use == nullptr);
+  if (kIsDebugBuild) {
+    // Following uses can only be synthesized uses.
+    while (use != nullptr) {
+      DCHECK(use->IsSynthesized());
+      use = use->GetNext();
+    }
+  }
 }
 
 void RegisterAllocator::ConnectSplitSiblings(LiveInterval* interval,