author     Michael Butler <butlermichael@google.com>    2020-12-18 20:53:55 -0800
committer  Lev Proleev <levp@google.com>                2021-01-19 16:21:17 +0000
commit     95331510ad69e90a0d390aff312ac998bd2259a7 (patch)
tree       be3b69df92170b1cc11cbef21b30b42156e199ef /neuralnetworks
parent     2a8b679bba46dc017ff7fd8bf2e1c5ec8766f5ee (diff)
Implement partial canonical Burst in NN util code
This CL adds a simple implementation of IBurst that dispatches calls to
an IPreparedModel object and changes
IPreparedModel::configureExecutionBurst to return this new object
(instead of returning an error).
This CL additionally defines an InvalidBurst class that returns errors
whenever it is used and a ResilientBurst class to recover an IBurst
object when it has died.
Bug: 177267324
Test: mma
Change-Id: I4c7e7ff4e6559aeb5e62c4fa02f2e751fef9d87d
Merged-In: I4c7e7ff4e6559aeb5e62c4fa02f2e751fef9d87d
(cherry picked from commit 44f324fb0d89ed896c9b0566ea632bddcfe69439)
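
For orientation, here is a minimal caller-side sketch (not part of this change; the preparedModel and request objects and the error handling are assumed) of the path this CL enables: configureExecutionBurst() now returns a working burst adapter whose execute() forwards to IPreparedModel::execute().

// Hypothetical usage sketch, assuming a valid nn::SharedPreparedModel named preparedModel.
// Before this CL, configureExecutionBurst() failed with GENERAL_FAILURE ("Not yet implemented").
nn::GeneralResult<nn::SharedBurst> maybeBurst = preparedModel->configureExecutionBurst();
if (!maybeBurst.has_value()) {
    // Handle the nn::GeneralError (error message plus nn::ErrorStatus code).
}
nn::SharedBurst burst = std::move(maybeBurst).value();

// Each call is dispatched by the new adapter to IPreparedModel::execute().
auto result = burst->execute(request, nn::MeasureTiming::NO);
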
Diffstat (limited to 'neuralnetworks')
14 files changed, 400 insertions, 10 deletions
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
new file mode 100644
index 0000000000..f2cbe93c7c
--- /dev/null
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H
+
+#include <nnapi/IBurst.h>
+#include <nnapi/IPreparedModel.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <optional>
+#include <utility>
+
+// See hardware/interfaces/neuralnetworks/utils/README.md for more information on HIDL interface
+// lifetimes across processes and for protecting asynchronous calls across HIDL.
+
+namespace android::hardware::neuralnetworks::V1_0::utils {
+
+// Class that adapts nn::IPreparedModel to nn::IBurst.
+class Burst final : public nn::IBurst {
+    struct PrivateConstructorTag {};
+
+  public:
+    static nn::GeneralResult<std::shared_ptr<const Burst>> create(
+            nn::SharedPreparedModel preparedModel);
+
+    Burst(PrivateConstructorTag tag, nn::SharedPreparedModel preparedModel);
+
+    OptionalCacheHold cacheMemory(const nn::Memory& memory) const override;
+
+    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
+            const nn::Request& request, nn::MeasureTiming measure) const override;
+
+  private:
+    const nn::SharedPreparedModel kPreparedModel;
+};
+
+}  // namespace android::hardware::neuralnetworks::V1_0::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_0_UTILS_BURST_H
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
index bda40c549b..8853eea048 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
@@ -35,7 +35,8 @@ namespace android::hardware::neuralnetworks::V1_0::utils {
 
 // Class that adapts V1_0::IPreparedModel to nn::IPreparedModel.
-class PreparedModel final : public nn::IPreparedModel {
+class PreparedModel final : public nn::IPreparedModel,
+                            public std::enable_shared_from_this<PreparedModel> {
     struct PrivateConstructorTag {};
 
   public:
diff --git a/neuralnetworks/1.0/utils/src/Burst.cpp b/neuralnetworks/1.0/utils/src/Burst.cpp
new file mode 100644
index 0000000000..384bd9b699
--- /dev/null
+++ b/neuralnetworks/1.0/utils/src/Burst.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Burst.h"
+
+#include <android-base/logging.h>
+#include <nnapi/IBurst.h>
+#include <nnapi/IPreparedModel.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <optional>
+#include <utility>
+
+namespace android::hardware::neuralnetworks::V1_0::utils {
+
+nn::GeneralResult<std::shared_ptr<const Burst>> Burst::create(
+        nn::SharedPreparedModel preparedModel) {
+    if (preparedModel == nullptr) {
+        return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
+               << "V1_0::utils::Burst::create must have non-null preparedModel";
+    }
+
+    return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(preparedModel));
+}
+
+Burst::Burst(PrivateConstructorTag /*tag*/, nn::SharedPreparedModel preparedModel)
+    : kPreparedModel(std::move(preparedModel)) {
+    CHECK(kPreparedModel != nullptr);
+}
+
+Burst::OptionalCacheHold Burst::cacheMemory(const nn::Memory& /*memory*/) const {
+    return nullptr;
+}
+
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
+        const nn::Request& request, nn::MeasureTiming measure) const {
+    return kPreparedModel->execute(request, measure, {}, {});
+}
+
+}  // namespace android::hardware::neuralnetworks::V1_0::utils
diff --git a/neuralnetworks/1.0/utils/src/PreparedModel.cpp b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
index b8de131383..858571d401 100644
--- a/neuralnetworks/1.0/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
@@ -16,6 +16,7 @@
 
 #include "PreparedModel.h"
 
+#include "Burst.h"
 #include "Callbacks.h"
 #include "Conversions.h"
 #include "Utils.h"
@@ -91,7 +92,7 @@ PreparedModel::executeFenced(const nn::Request& /*request*/,
 }
 
 nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
-    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "Not yet implemented";
+    return Burst::create(shared_from_this());
 }
 
 std::any PreparedModel::getUnderlyingResource() const {
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
index f43933309c..fb1113051c 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
@@ -36,7 +36,8 @@ namespace android::hardware::neuralnetworks::V1_2::utils {
 
 // Class that adapts V1_2::IPreparedModel to nn::IPreparedModel.
-class PreparedModel final : public nn::IPreparedModel {
+class PreparedModel final : public nn::IPreparedModel,
+                            public std::enable_shared_from_this<PreparedModel> {
     struct PrivateConstructorTag {};
 
   public:
diff --git a/neuralnetworks/1.2/utils/src/PreparedModel.cpp b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
index ad54c39282..6841c5e007 100644
--- a/neuralnetworks/1.2/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
@@ -27,6 +27,7 @@
 #include <nnapi/IPreparedModel.h>
 #include <nnapi/Result.h>
 #include <nnapi/Types.h>
+#include <nnapi/hal/1.0/Burst.h>
 #include <nnapi/hal/1.0/Conversions.h>
 #include <nnapi/hal/CommonUtils.h>
 #include <nnapi/hal/HandleError.h>
@@ -118,7 +119,7 @@ PreparedModel::executeFenced(const nn::Request& /*request*/,
 }
 
 nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
-    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "Not yet implemented";
+    return V1_0::utils::Burst::create(shared_from_this());
 }
 
 std::any PreparedModel::getUnderlyingResource() const {
diff --git a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
index 63aa6faf79..690fecccfb 100644
--- a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
+++ b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
@@ -35,7 +35,8 @@ namespace android::hardware::neuralnetworks::V1_3::utils {
 
 // Class that adapts V1_3::IPreparedModel to nn::IPreparedModel.
-class PreparedModel final : public nn::IPreparedModel {
+class PreparedModel final : public nn::IPreparedModel,
+                            public std::enable_shared_from_this<PreparedModel> {
     struct PrivateConstructorTag {};
 
   public:
diff --git a/neuralnetworks/1.3/utils/src/PreparedModel.cpp b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
index 7afa777868..725e4f546a 100644
--- a/neuralnetworks/1.3/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
@@ -29,6 +29,7 @@
 #include <nnapi/Result.h>
 #include <nnapi/TypeUtils.h>
 #include <nnapi/Types.h>
+#include <nnapi/hal/1.0/Burst.h>
 #include <nnapi/hal/1.2/Conversions.h>
 #include <nnapi/hal/CommonUtils.h>
 #include <nnapi/hal/HandleError.h>
@@ -198,7 +199,7 @@ PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::S
 }
 
 nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const {
-    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "Not yet implemented";
+    return V1_0::utils::Burst::create(shared_from_this());
 }
 
 std::any PreparedModel::getUnderlyingResource() const {
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
new file mode 100644
index 0000000000..83e60b6a25
--- /dev/null
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_INVALID_BURST_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_INVALID_BURST_H
+
+#include <nnapi/IBurst.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <optional>
+#include <utility>
+
+namespace android::hardware::neuralnetworks::utils {
+
+class InvalidBurst final : public nn::IBurst {
+  public:
+    OptionalCacheHold cacheMemory(const nn::Memory& memory) const override;
+
+    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
+            const nn::Request& request, nn::MeasureTiming measure) const override;
+};
+
+}  // namespace android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_INVALID_BURST_H
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
new file mode 100644
index 0000000000..0df287f2f8
--- /dev/null
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_RESILIENT_BURST_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_RESILIENT_BURST_H
+
+#include <android-base/thread_annotations.h>
+#include <nnapi/IBurst.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <utility>
+
+namespace android::hardware::neuralnetworks::utils {
+
+class ResilientBurst final : public nn::IBurst,
+                             public std::enable_shared_from_this<ResilientBurst> {
+    struct PrivateConstructorTag {};
+
+  public:
+    using Factory = std::function<nn::GeneralResult<nn::SharedBurst>()>;
+
+    static nn::GeneralResult<std::shared_ptr<const ResilientBurst>> create(Factory makeBurst);
+
+    ResilientBurst(PrivateConstructorTag tag, Factory makeBurst, nn::SharedBurst burst);
+
+    nn::SharedBurst getBurst() const;
+    nn::GeneralResult<nn::SharedBurst> recover(const nn::IBurst* failingBurst) const;
+
+    OptionalCacheHold cacheMemory(const nn::Memory& memory) const override;
+
+    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
+            const nn::Request& request, nn::MeasureTiming measure) const override;
+
+  private:
+    const Factory kMakeBurst;
+    mutable std::mutex mMutex;
+    mutable nn::SharedBurst mBurst GUARDED_BY(mMutex);
+};
+
+}  // namespace android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_RESILIENT_BURST_H
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
index 4014404aee..a6c1b1911a 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
@@ -30,7 +30,8 @@ namespace android::hardware::neuralnetworks::utils {
 
-class ResilientPreparedModel final : public nn::IPreparedModel {
+class ResilientPreparedModel final : public nn::IPreparedModel,
+                                     public std::enable_shared_from_this<ResilientPreparedModel> {
     struct PrivateConstructorTag {};
 
   public:
@@ -62,6 +63,9 @@ class ResilientPreparedModel final : public nn::IPreparedModel {
     std::any getUnderlyingResource() const override;
 
   private:
+    bool isValidInternal() const EXCLUDES(mMutex);
+    nn::GeneralResult<nn::SharedBurst> configureExecutionBurstInternal() const;
+
     const Factory kMakePreparedModel;
     mutable std::mutex mMutex;
    mutable nn::SharedPreparedModel mPreparedModel GUARDED_BY(mMutex);
diff --git a/neuralnetworks/utils/common/src/InvalidBurst.cpp b/neuralnetworks/utils/common/src/InvalidBurst.cpp
new file mode 100644
index 0000000000..4ca6603eb7
--- /dev/null
+++ b/neuralnetworks/utils/common/src/InvalidBurst.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "InvalidBurst.h"
+
+#include <nnapi/IBurst.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <optional>
+#include <utility>
+
+namespace android::hardware::neuralnetworks::utils {
+
+InvalidBurst::OptionalCacheHold InvalidBurst::cacheMemory(const nn::Memory& /*memory*/) const {
+    return nullptr;
+}
+
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> InvalidBurst::execute(
+        const nn::Request& /*request*/, nn::MeasureTiming /*measure*/) const {
+    return NN_ERROR() << "InvalidBurst";
+}
+
+}  // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/src/ResilientBurst.cpp b/neuralnetworks/utils/common/src/ResilientBurst.cpp
new file mode 100644
index 0000000000..0d3cb33a98
--- /dev/null
+++ b/neuralnetworks/utils/common/src/ResilientBurst.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ResilientBurst.h"
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <nnapi/IBurst.h>
+#include <nnapi/Result.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <utility>
+
+namespace android::hardware::neuralnetworks::utils {
+namespace {
+
+template <typename FnType>
+auto protect(const ResilientBurst& resilientBurst, const FnType& fn)
+        -> decltype(fn(*resilientBurst.getBurst())) {
+    auto burst = resilientBurst.getBurst();
+    auto result = fn(*burst);
+
+    // Immediately return if burst is not dead.
+    if (result.has_value() || result.error().code != nn::ErrorStatus::DEAD_OBJECT) {
+        return result;
+    }
+
+    // Attempt recovery and return if it fails.
+    auto maybeBurst = resilientBurst.recover(burst.get());
+    if (!maybeBurst.has_value()) {
+        auto [resultErrorMessage, resultErrorCode, resultOutputShapes] = std::move(result).error();
+        const auto& [recoveryErrorMessage, recoveryErrorCode] = maybeBurst.error();
+        return nn::error(resultErrorCode, std::move(resultOutputShapes))
+               << resultErrorMessage << ", and failed to recover dead burst object with error "
+               << recoveryErrorCode << ": " << recoveryErrorMessage;
+    }
+    burst = std::move(maybeBurst).value();
+
+    return fn(*burst);
+}
+
+}  // namespace
+
+nn::GeneralResult<std::shared_ptr<const ResilientBurst>> ResilientBurst::create(Factory makeBurst) {
+    if (makeBurst == nullptr) {
+        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
+               << "utils::ResilientBurst::create must have non-empty makeBurst";
+    }
+    auto burst = NN_TRY(makeBurst());
+    CHECK(burst != nullptr);
+    return std::make_shared<ResilientBurst>(PrivateConstructorTag{}, std::move(makeBurst),
+                                            std::move(burst));
+}
+
+ResilientBurst::ResilientBurst(PrivateConstructorTag /*tag*/, Factory makeBurst,
+                               nn::SharedBurst burst)
+    : kMakeBurst(std::move(makeBurst)), mBurst(std::move(burst)) {
+    CHECK(kMakeBurst != nullptr);
+    CHECK(mBurst != nullptr);
+}
+
+nn::SharedBurst ResilientBurst::getBurst() const {
+    std::lock_guard guard(mMutex);
+    return mBurst;
+}
+
+nn::GeneralResult<nn::SharedBurst> ResilientBurst::recover(const nn::IBurst* failingBurst) const {
+    std::lock_guard guard(mMutex);
+
+    // Another caller updated the failing burst.
+    if (mBurst.get() != failingBurst) {
+        return mBurst;
+    }
+
+    mBurst = NN_TRY(kMakeBurst());
+    return mBurst;
+}
+
+ResilientBurst::OptionalCacheHold ResilientBurst::cacheMemory(const nn::Memory& memory) const {
+    return getBurst()->cacheMemory(memory);
+}
+
+nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> ResilientBurst::execute(
+        const nn::Request& request, nn::MeasureTiming measure) const {
+    const auto fn = [&request, measure](const nn::IBurst& burst) {
+        return burst.execute(request, measure);
+    };
+    return protect(*this, fn);
+}
+
+}  // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
index faba9965f2..5dd5f99f5f 100644
--- a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
+++ b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
@@ -16,6 +16,9 @@
 
 #include "ResilientPreparedModel.h"
 
+#include "InvalidBurst.h"
+#include "ResilientBurst.h"
+
 #include <android-base/logging.h>
 #include <android-base/thread_annotations.h>
 #include <nnapi/IPreparedModel.h>
@@ -125,14 +128,34 @@ ResilientPreparedModel::executeFenced(const nn::Request& request,
 }
 
 nn::GeneralResult<nn::SharedBurst> ResilientPreparedModel::configureExecutionBurst() const {
-    const auto fn = [](const nn::IPreparedModel& preparedModel) {
-        return preparedModel.configureExecutionBurst();
+#if 0
+    auto self = shared_from_this();
+    ResilientBurst::Factory makeBurst =
+            [preparedModel = std::move(self)]() -> nn::GeneralResult<nn::SharedBurst> {
+        return preparedModel->configureExecutionBurst();
     };
-    return protect(*this, fn);
+    return ResilientBurst::create(std::move(makeBurst));
+#else
+    return configureExecutionBurstInternal();
+#endif
 }
 
 std::any ResilientPreparedModel::getUnderlyingResource() const {
     return getPreparedModel()->getUnderlyingResource();
 }
 
+bool ResilientPreparedModel::isValidInternal() const {
+    return true;
+}
+
+nn::GeneralResult<nn::SharedBurst> ResilientPreparedModel::configureExecutionBurstInternal() const {
+    if (!isValidInternal()) {
+        return std::make_shared<const InvalidBurst>();
+    }
+    const auto fn = [](const nn::IPreparedModel& preparedModel) {
+        return preparedModel.configureExecutionBurst();
+    };
+    return protect(*this, fn);
+}
+
 }  // namespace android::hardware::neuralnetworks::utils
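
The #if 0 block in ResilientPreparedModel::configureExecutionBurst() above shows how the resilient path is expected to be wired up once it is enabled. As a hedged, self-contained sketch of that factory pattern (preparedModel is assumed to be an nn::SharedPreparedModel that the lambda keeps alive):

// Hypothetical sketch: wrap burst creation in a factory so ResilientBurst can
// re-create the burst after a DEAD_OBJECT failure and retry the call once.
ResilientBurst::Factory makeBurst = [preparedModel]() -> nn::GeneralResult<nn::SharedBurst> {
    return preparedModel->configureExecutionBurst();
};
nn::GeneralResult<std::shared_ptr<const ResilientBurst>> resilientBurst =
        ResilientBurst::create(std::move(makeBurst));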