1 /*
2  * This file is auto-generated.  DO NOT MODIFY.
3  * Using: out/host/linux-x86/bin/aidl --lang=ndk --structured --version 4 --hash 53178f8de9b8861df391cf0593f6f3e08adad33d -t --stability vintf --min_sdk_version 30 -pout/soong/.intermediates/hardware/interfaces/common/aidl/android.hardware.common_interface/2/preprocessed.aidl -pout/soong/.intermediates/hardware/interfaces/graphics/common/aidl/android.hardware.graphics.common_interface/6/preprocessed.aidl --ninja -d out/soong/.intermediates/hardware/interfaces/neuralnetworks/aidl/android.hardware.neuralnetworks-V4-ndk-source/gen/staging/android/hardware/neuralnetworks/PrepareModelConfig.cpp.d -h out/soong/.intermediates/hardware/interfaces/neuralnetworks/aidl/android.hardware.neuralnetworks-V4-ndk-source/gen/include/staging -o out/soong/.intermediates/hardware/interfaces/neuralnetworks/aidl/android.hardware.neuralnetworks-V4-ndk-source/gen/staging -Nhardware/interfaces/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/4 hardware/interfaces/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/4/android/hardware/neuralnetworks/PrepareModelConfig.aidl
4  *
 * DO NOT CHECK THIS FILE INTO A CODE TREE (e.g. git, etc.).
6  * ALWAYS GENERATE THIS FILE FROM UPDATED AIDL COMPILER
7  * AS A BUILD INTERMEDIATE ONLY. THIS IS NOT SOURCE CODE.
8  */
9 #pragma once
10 
#include <array>
#include <cstdint>
#include <memory>
#include <optional>
#include <sstream>
#include <string>
#include <tuple>
#include <vector>
16 #include <android/binder_interface_utils.h>
17 #include <android/binder_parcelable_utils.h>
18 #include <android/binder_to_string.h>
19 #include <aidl/android/hardware/neuralnetworks/ExecutionPreference.h>
20 #include <aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.h>
21 #include <aidl/android/hardware/neuralnetworks/Priority.h>
22 #include <aidl/android/hardware/neuralnetworks/TokenValuePair.h>
23 #ifdef BINDER_STABILITY_SUPPORT
24 #include <android/binder_stability.h>
25 #endif  // BINDER_STABILITY_SUPPORT
26 
27 namespace aidl::android::hardware::neuralnetworks {
28 class ExtensionNameAndPrefix;
29 class TokenValuePair;
30 }  // namespace aidl::android::hardware::neuralnetworks
31 namespace aidl {
32 namespace android {
33 namespace hardware {
34 namespace neuralnetworks {
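// Parcelable generated from the android.hardware.neuralnetworks.PrepareModelConfig
// AIDL type (interface version 4). It bundles the compilation settings a client
// passes to IDevice::prepareModelWithConfig: execution preference, priority, an
// optional deadline, compilation cache file descriptors and cache token, and
// vendor-specific compilation hints with their extension name/prefix mapping.
// See the AIDL definition for the full field semantics.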
35 class PrepareModelConfig {
36 public:
37   typedef std::false_type fixed_size;
38   static const char* descriptor;
39 
40   ::aidl::android::hardware::neuralnetworks::ExecutionPreference preference = ::aidl::android::hardware::neuralnetworks::ExecutionPreference(0);
41   ::aidl::android::hardware::neuralnetworks::Priority priority = ::aidl::android::hardware::neuralnetworks::Priority(0);
42   int64_t deadlineNs = 0L;
43   std::vector<::ndk::ScopedFileDescriptor> modelCache;
44   std::vector<::ndk::ScopedFileDescriptor> dataCache;
45   std::array<uint8_t, 32> cacheToken = {{}};
46   std::vector<::aidl::android::hardware::neuralnetworks::TokenValuePair> compilationHints;
47   std::vector<::aidl::android::hardware::neuralnetworks::ExtensionNameAndPrefix> extensionNameToPrefix;
48 
49   binder_status_t readFromParcel(const AParcel* parcel);
50   binder_status_t writeToParcel(AParcel* parcel) const;
51 
52   inline bool operator==(const PrepareModelConfig& _rhs) const {
53     return std::tie(preference, priority, deadlineNs, modelCache, dataCache, cacheToken, compilationHints, extensionNameToPrefix) == std::tie(_rhs.preference, _rhs.priority, _rhs.deadlineNs, _rhs.modelCache, _rhs.dataCache, _rhs.cacheToken, _rhs.compilationHints, _rhs.extensionNameToPrefix);
54   }
55   inline bool operator<(const PrepareModelConfig& _rhs) const {
56     return std::tie(preference, priority, deadlineNs, modelCache, dataCache, cacheToken, compilationHints, extensionNameToPrefix) < std::tie(_rhs.preference, _rhs.priority, _rhs.deadlineNs, _rhs.modelCache, _rhs.dataCache, _rhs.cacheToken, _rhs.compilationHints, _rhs.extensionNameToPrefix);
57   }
58   inline bool operator!=(const PrepareModelConfig& _rhs) const {
59     return !(*this == _rhs);
60   }
61   inline bool operator>(const PrepareModelConfig& _rhs) const {
62     return _rhs < *this;
63   }
64   inline bool operator>=(const PrepareModelConfig& _rhs) const {
65     return !(*this < _rhs);
66   }
67   inline bool operator<=(const PrepareModelConfig& _rhs) const {
68     return !(_rhs < *this);
69   }
70 
71   static const ::ndk::parcelable_stability_t _aidl_stability = ::ndk::STABILITY_VINTF;
72   enum : int32_t { BYTE_SIZE_OF_CACHE_TOKEN = 32 };
  inline std::string toString() const {
74     std::ostringstream _aidl_os;
75     _aidl_os << "PrepareModelConfig{";
76     _aidl_os << "preference: " << ::android::internal::ToString(preference);
77     _aidl_os << ", priority: " << ::android::internal::ToString(priority);
78     _aidl_os << ", deadlineNs: " << ::android::internal::ToString(deadlineNs);
79     _aidl_os << ", modelCache: " << ::android::internal::ToString(modelCache);
80     _aidl_os << ", dataCache: " << ::android::internal::ToString(dataCache);
81     _aidl_os << ", cacheToken: " << ::android::internal::ToString(cacheToken);
82     _aidl_os << ", compilationHints: " << ::android::internal::ToString(compilationHints);
83     _aidl_os << ", extensionNameToPrefix: " << ::android::internal::ToString(extensionNameToPrefix);
84     _aidl_os << "}";
85     return _aidl_os.str();
86   }
87 };
88 }  // namespace neuralnetworks
89 }  // namespace hardware
90 }  // namespace android
91 }  // namespace aidl
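
/*
 * Illustrative usage sketch (not part of the generated header): a minimal
 * example of how a client might populate this parcelable before passing it to
 * IDevice::prepareModelWithConfig. The field values below are placeholders,
 * and the deadline sentinel is an assumption; consult the IDevice AIDL
 * documentation for the authoritative semantics.
 *
 *   namespace nn = aidl::android::hardware::neuralnetworks;
 *
 *   nn::PrepareModelConfig config;
 *   config.preference = nn::ExecutionPreference::FAST_SINGLE_ANSWER;
 *   config.priority = nn::Priority::MEDIUM;
 *   config.deadlineNs = -1;   // assumed "no deadline" sentinel
 *   config.cacheToken = {};   // zero token: compilation caching not requested
 *   // modelCache, dataCache, compilationHints, and extensionNameToPrefix are
 *   // left empty when caching and vendor-specific hints are not used.
 */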
92