author    | Andreas Gampe <agampe@google.com> | 2015-05-07 22:31:55 -0700
committer | Andreas Gampe <agampe@google.com> | 2015-05-12 11:56:42 -0700
commit    | ce7d005c1ba0716423d44861d2d0f58f142ff06a (patch)
tree      | 650120ade40af0aba0ef66a69e84299e936912d5 /compiler/optimizing/intrinsics_arm.cc
parent    | 8db2a6deb82d9c14d62e7ea201bc27b3040f1b62 (diff)
ART: arm indexOf intrinsics for the optimizing compiler
Add intrinsics implementations for indexOf in the optimizing
compiler. These are mostly ported from Quick.
Bug: 20889065
(cherry picked from commit ba6fdbcb764d5a8972f5ff2d7147e4d78226b347)
Change-Id: I18ee849d41187a381f99529669e6f97040aaacf6
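
For orientation before the diff: the emitted fast path can only search 16-bit char values, so any search code point above 0xFFFF (one that cannot be a char) is routed to the generic runtime slow path, statically when the code point is a compile-time constant and via the emitted cmp/branch otherwise. The following is a minimal standalone sketch of that dispatch decision, for illustration only; it is not ART code, and IndexOfPath and ChooseIndexOfPath are hypothetical names invented for this example.

```cpp
// Standalone illustration only -- not part of the ART sources.
// Mirrors the bound check in GenerateVisitStringIndexOf below: a code point
// that does not fit in a uint16_t can never match the char-based fast search,
// so it is sent to the generic runtime slow path instead.
#include <cstdint>
#include <limits>

enum class IndexOfPath { kFastCharSearch, kRuntimeSlowPath };  // hypothetical names

IndexOfPath ChooseIndexOfPath(uint32_t code_point) {
  return code_point > std::numeric_limits<uint16_t>::max()
             ? IndexOfPath::kRuntimeSlowPath
             : IndexOfPath::kFastCharSearch;
}
```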
Diffstat (limited to 'compiler/optimizing/intrinsics_arm.cc')
-rw-r--r-- | compiler/optimizing/intrinsics_arm.cc | 90
1 file changed, 88 insertions(+), 2 deletions(-)
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 7f7b450003..dccfe9a0ca 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -850,6 +850,94 @@ void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+static void GenerateVisitStringIndexOf(HInvoke* invoke,
+                                       ArmAssembler* assembler,
+                                       CodeGeneratorARM* codegen,
+                                       ArenaAllocator* allocator,
+                                       bool start_at_zero) {
+  LocationSummary* locations = invoke->GetLocations();
+  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
+
+  // Note that the null check must have been done earlier.
+  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
+  // or directly dispatch if we have a constant.
+  SlowPathCodeARM* slow_path = nullptr;
+  if (invoke->InputAt(1)->IsIntConstant()) {
+    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
+        std::numeric_limits<uint16_t>::max()) {
+      // Always needs the slow-path. We could directly dispatch to it, but this case should be
+      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
+      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
+      codegen->AddSlowPath(slow_path);
+      __ b(slow_path->GetEntryLabel());
+      __ Bind(slow_path->GetExitLabel());
+      return;
+    }
+  } else {
+    Register char_reg = locations->InAt(1).AsRegister<Register>();
+    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
+    __ cmp(char_reg, ShifterOperand(tmp_reg));
+    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
+    codegen->AddSlowPath(slow_path);
+    __ b(slow_path->GetEntryLabel(), HI);
+  }
+
+  if (start_at_zero) {
+    DCHECK_EQ(tmp_reg, R2);
+    // Start-index = 0.
+    __ LoadImmediate(tmp_reg, 0);
+  }
+
+  __ LoadFromOffset(kLoadWord, LR, TR,
+                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
+  __ blx(LR);
+
+  if (slow_path != nullptr) {
+    __ Bind(slow_path->GetExitLabel());
+  }
+}
+
+void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
+  LocationSummary* locations = new (arena_) LocationSummary(invoke,
+                                                            LocationSummary::kCall,
+                                                            kIntrinsified);
+  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
+  // best to align the inputs accordingly.
+  InvokeRuntimeCallingConvention calling_convention;
+  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+  locations->SetOut(Location::RegisterLocation(R0));
+
+  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
+  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+}
+
+void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
+  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
+}
+
+void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
+  LocationSummary* locations = new (arena_) LocationSummary(invoke,
+                                                            LocationSummary::kCall,
+                                                            kIntrinsified);
+  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
+  // best to align the inputs accordingly.
+  InvokeRuntimeCallingConvention calling_convention;
+  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
+  locations->SetOut(Location::RegisterLocation(R0));
+
+  // Need a temp for slow-path codepoint compare.
+  locations->AddTemp(Location::RequiresRegister());
+}
+
+void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
+  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
+}
+
 void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
   LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                             LocationSummary::kCall,
@@ -951,8 +1039,6 @@ UNIMPLEMENTED_INTRINSIC(MathRoundDouble)  // Could be done by changing rounding
 UNIMPLEMENTED_INTRINSIC(MathRoundFloat)   // Could be done by changing rounding mode, maybe?
 UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)    // High register pressure.
 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
-UNIMPLEMENTED_INTRINSIC(StringIndexOf)
-UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
 UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
 UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)