1 /* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
2 
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6 
7     http://www.apache.org/licenses/LICENSE-2.0
8 
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 
16 #pragma once
17 
18 /******************************************************************
19  *
20  * IMPORTANT NOTICE:
21  *
22  *   This file is part of Android's set of stable system headers
23  *   exposed by the Android NDK (Native Development Kit).
24  *
25  *   Third-party source AND binary code relies on the definitions
26  *   here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.
27  *
28  *   - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)
29  *   - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS
30  *   - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY
31  *   - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
32  */
33 
34 #include <stdbool.h>
35 #include <stdint.h>
36 #include <stdio.h>
37 #include <stdlib.h>
38 
39 // Changed when importing from AOSP
40 #include "tensorflow/lite/nnapi/NeuralNetworksTypes.h"
41 
42 #ifdef __cplusplus
43 extern "C" {
44 #endif
45 
46 /**
47  * Performance information for the reference workload.
48  *
49  * Used by a driver to report its performance characteristics.
50  */
51 typedef struct {
52   /**
53    * Ratio of the time taken by the driver to execute the workload compared to
54    * the time the CPU would take for the same workload. A lower number is
55    * better.
56    */
57   float execTime;
58 
59   /**
60    * Ratio of the energy used by the driver compared to what the CPU would use
61    * for doing the same workload. A lower number is better.
62    */
63   float powerUsage;
64 } SL_ANeuralNetworksPerformanceInfo;
65 
66 /**
67  * Driver performance when operating on a particular data type. In the case of
68  * float32 data, this is used when the calculations are not relaxed.
69  */
70 typedef struct {
71   int32_t operandType;
72   SL_ANeuralNetworksPerformanceInfo performanceInfo;
73 } SL_ANeuralNetworksOperandPerformanceInfo;
74 
75 /**
76  * Information about NNAPI Vendor extension operand type.
77  */
78 typedef struct {
79   /**
80    * The byte size of the operand (if scalar) or of a single element (if
81    * tensor).
82    */
83   uint32_t byteSize;
84 
85   /**
86    * The extension operand type.
87    */
88   uint16_t type;
89 
90   /**
91    * Indicates whether the extension operand type represents a tensor or a
92    * scalar.
93    */
94   bool isTensor;
95 } SL_ANeuralNetworksExtensionOperandTypeInformation;
96 
97 /**
98  * The different performance info kinds.
99  */
100 typedef enum {
101   /**
   * Driver performance when operating on scalar float32 data but performing
103    * calculations with range and/or precision as low as that of the IEEE 754
104    * 16-bit floating-point format.
105    */
106   SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR = 0,
107 
108   /**
   * Driver performance when operating on tensor float32 data but performing
110    * calculations with range and/or precision as low as that of the IEEE 754
111    * 16-bit floating-point format.
112    */
113   SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR = 1,
114 
115   /**
116    * Performance of an {@link ANEURALNETWORKS_IF} operation is the sum of {@link
117    * ANEURALNETWORKS_IF}'s performance and the mean of performance for the two
118    * branch subgraphs, where performance for a subgraph is the sum of the
119    * performance of all operations within the subgraph.
120    */
121   SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_IF = 2,
122 
123   /**
124    * Performance of a {@link ANEURALNETWORKS_WHILE} operation is the sum of
125    * {@link ANEURALNETWORKS_WHILE}'s performance, performance for the condition
126    * subgraph and performance for the body subgraph, where performance for a
127    * subgraph is the sum of the performance of all operations within the
128    * subgraph.
129    */
130   SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_WHILE = 3,
131 } SL_ANeuralNetworksPerformanceInfoCode;
132 
133 /**
134  * Sets the compilation caching signature and file descriptors.
135  *
136  * Provides optional caching information to the support library driver for
137  * faster repeated compilation.
138  *
139  * See {@link ANeuralNetworksCompilation} for information on multithreaded
140  * usage.
141  *
142  * @param compilation The compilation to be modified.
 * @param modelCacheFds An array of file descriptors for the security-sensitive
 *                      cache. The file descriptors will be duplicated.
 * @param numModelCacheFiles The number of the model cache files.
 * @param dataCacheFds An array of file descriptors for the constants' cache.
 *                     The file descriptors will be duplicated.
 * @param numDataCacheFiles The number of the data cache files.
 * @param token The token provided by the user to specify a model. It must be
 *              of length ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN. The user
 *              should ensure that the token is unique to a model within the
 *              application. The NNAPI runtime cannot detect token collisions;
 *              a collision will result in a failed execution or in a
 *              successful execution that produces incorrect output values.
155  *
156  * @return ANEURALNETWORKS_NO_ERROR if successful.
157  *
 * Available in the compatibility library build only.
159  */
160 int SL_ANeuralNetworksCompilation_setCachingFromFds(
161     ANeuralNetworksCompilation* compilation, const int* modelCacheFds,
162     const uint32_t numModelCacheFiles, const int* dataCacheFds,
163     const uint32_t numDataCacheFiles, const uint8_t* token);
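
/*
 * Illustrative usage sketch, not part of the API surface: passing already-open
 * cache file descriptors and a cache token to a compilation. The fd variables
 * and their counts are assumptions made for the example; the token must be
 * ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN bytes chosen by the caller.
 *
 *     int modelCacheFds[] = {modelCacheFd};  // opened by the caller
 *     int dataCacheFds[] = {dataCacheFd};    // opened by the caller
 *     uint8_t token[ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN] = {0};
 *     int status = SL_ANeuralNetworksCompilation_setCachingFromFds(
 *         compilation, modelCacheFds, 1, dataCacheFds, 1, token);
 *     if (status != ANEURALNETWORKS_NO_ERROR) {
 *       // Handle the error; the compilation itself may still be usable.
 *     }
 */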
164 
165 /**
166  * Gets the caching requirements of the driver implementation.
167  *
168  * There are two types of cache file descriptors provided to the driver: model
169  * cache and data cache.
170  *
171  * The data cache is for caching constant data, possibly including preprocessed
172  * and transformed tensor buffers. Any modification to the data cache should
173  * have no worse effect than generating bad output values at execution time.
174  *
175  * The model cache is for caching security-sensitive data such as compiled
176  * executable machine code in the device's native binary format. A modification
177  * to the model cache may affect the driver's execution behavior, and a
178  * malicious client could make use of this to execute beyond the granted
179  * permission.
180  *
 * SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded returns how many of
 * each type of cache file the driver implementation needs to cache a single
 * compilation. Returning 0 for both types indicates compilation caching is not
184  * supported by this driver. The driver may still choose not to cache certain
185  * compiled models even if it reports that caching is supported.
186  *
187  * @param device The representation of the specified device.
188  * @param numModelCacheFiles The number of the model cache files. A value of 0
189  * is returned on error.
190  * @param numDataCacheFiles The number of the data cache files. A value of 0 is
191  * returned on error.
192  *
193  * @return ANEURALNETWORKS_NO_ERROR if successful.
194  *
 * Available in the compatibility library build only.
196  */
197 int SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded(
198     const ANeuralNetworksDevice* device, uint32_t* numModelCacheFiles,
199     uint32_t* numDataCacheFiles);
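
/*
 * Illustrative sketch: querying the driver's caching requirements before
 * opening any cache files. `device` is assumed to be a valid
 * ANeuralNetworksDevice obtained elsewhere.
 *
 *     uint32_t numModelCacheFiles = 0;
 *     uint32_t numDataCacheFiles = 0;
 *     if (SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded(
 *             device, &numModelCacheFiles, &numDataCacheFiles) ==
 *             ANEURALNETWORKS_NO_ERROR &&
 *         (numModelCacheFiles > 0 || numDataCacheFiles > 0)) {
 *       // The driver supports compilation caching; open that many files.
 *     }
 */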
200 
201 /**
202  * Get NNAPI Device performance/power capabilities.
203  *
204  * This returns performance of non-extension operations.
205  *
206  * Performance of an operation other than {@link ANEURALNETWORKS_IF} and {@link
207  * ANEURALNETWORKS_WHILE} comes from the type of its first operand.
208  *
209  * @param device The representation of the specified device.
 * @param performanceInfoKind The kind of performance info to be queried. Must
 *                            be one of the values from
 *                            {@link SL_ANeuralNetworksPerformanceInfoCode}.
 * @param performanceInfo The location that will be populated with the
 *                        requested performance information.
212  * @return ANEURALNETWORKS_NO_ERROR if successful.
213  *
 * Available in the compatibility library build only.
215  */
216 int SL_ANeuralNetworksDevice_getPerformanceInfo(
217     const ANeuralNetworksDevice* device, int32_t performanceInfoKind,
218     SL_ANeuralNetworksPerformanceInfo* performanceInfo);
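
/*
 * Illustrative sketch: reading one of the performance info kinds defined in
 * {@link SL_ANeuralNetworksPerformanceInfoCode}. Per
 * SL_ANeuralNetworksPerformanceInfo, ratios below 1.0 mean the driver is
 * faster (or uses less energy) than the CPU for the reference workload.
 *
 *     SL_ANeuralNetworksPerformanceInfo info;
 *     if (SL_ANeuralNetworksDevice_getPerformanceInfo(
 *             device, SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR,
 *             &info) == ANEURALNETWORKS_NO_ERROR) {
 *       // info.execTime and info.powerUsage are ratios relative to the CPU.
 *     }
 */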
219 
220 /**
221  * Get NNAPI Device operand performance/power capabilities.
222  *
223  * This returns performance of non-extension operations.
224  *
225  * Performance of an operation other than {@link ANEURALNETWORKS_IF} and {@link
226  * ANEURALNETWORKS_WHILE} comes from the type of its first operand.
227  *
228  * @param device The representation of the specified device.
229  * @param context Context to pass to the callback.
230  * @param callback Callback taking operand performance and context.
231  * @return ANEURALNETWORKS_NO_ERROR if successful.
232  *
 * Available in the compatibility library build only.
234  */
235 int SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo(
236     const ANeuralNetworksDevice* device, void* context,
237     void (*callback)(SL_ANeuralNetworksOperandPerformanceInfo, void*));
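
/*
 * Illustrative sketch: collecting per-operand-type performance through the
 * callback. The callback and the float32Perf accumulator are assumptions made
 * for the example.
 *
 *     static void OperandPerfCallback(
 *         SL_ANeuralNetworksOperandPerformanceInfo perf, void* context) {
 *       SL_ANeuralNetworksPerformanceInfo* out =
 *           (SL_ANeuralNetworksPerformanceInfo*)context;
 *       if (perf.operandType == ANEURALNETWORKS_TENSOR_FLOAT32) {
 *         *out = perf.performanceInfo;  // keep the float32 tensor entry
 *       }
 *     }
 *
 *     SL_ANeuralNetworksPerformanceInfo float32Perf = {0.0f, 0.0f};
 *     SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo(
 *         device, &float32Perf, OperandPerfCallback);
 */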
238 
239 /**
240  * Get the number of extensions supported by the driver implementation.
241  *
242  * @param device The representation of the specified device.
 * @param vendorExtensionCount The number of vendor extensions the device
 *                             supports. To be used in {@link
 *                             SL_ANeuralNetworksDevice_getVendorExtensionName}
 *                             and {@link
 *                             SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}.
248  * @return ANEURALNETWORKS_NO_ERROR if successful.
249  *
 * Available in the compatibility library build only.
251  */
252 int SL_ANeuralNetworksDevice_getVendorExtensionCount(
253     const ANeuralNetworksDevice* device, uint32_t* vendorExtensionCount);
254 
255 /**
256  * Gets information about a specified extension supported by the driver
257  * implementation.
258  *
259  * @param device The representation of the specified device.
260  * @param vendorExtensionIndex The index of the specified vendor extension. Must
261  * be less than the number of available vendor extensions.
262  * @param extensionName Name of the NNAPI HAL Extension.
263  * @return ANEURALNETWORKS_NO_ERROR if successful.
264  *
 * Available in the compatibility library build only.
266  */
267 int SL_ANeuralNetworksDevice_getVendorExtensionName(
268     const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex,
269     const char** extensionName);
270 
271 /**
272  * Gets a specified extension's operand type information supported by the driver
273  * implementation.
274  *
275  * @param device The representation of the specified device.
276  * @param vendorExtensionIndex The index of the specified vendor extension. Must
277  * be less than the number of available vendor extensions.
278  * @param context Context to pass to the callback.
279  * @param callback Callback taking operand type information and context.
280  * @return ANEURALNETWORKS_NO_ERROR if successful.
281  *
 * Available in the compatibility library build only.
283  */
284 int SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation(
285     const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex,
286     void* context,
287     void (*callback)(SL_ANeuralNetworksExtensionOperandTypeInformation, void*));
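
/*
 * Illustrative sketch: enumerating vendor extensions and dumping their operand
 * type information. The printing callback is an assumption for the example.
 *
 *     static void PrintExtensionOperand(
 *         SL_ANeuralNetworksExtensionOperandTypeInformation info,
 *         void* context) {
 *       (void)context;
 *       printf("  operand type 0x%x: %u byte(s), %s\n", info.type,
 *              (unsigned)info.byteSize, info.isTensor ? "tensor" : "scalar");
 *     }
 *
 *     uint32_t extensionCount = 0;
 *     SL_ANeuralNetworksDevice_getVendorExtensionCount(device, &extensionCount);
 *     for (uint32_t i = 0; i < extensionCount; ++i) {
 *       const char* name = NULL;
 *       SL_ANeuralNetworksDevice_getVendorExtensionName(device, i, &name);
 *       printf("extension %u: %s\n", i, name);
 *       SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation(
 *           device, i, NULL, PrintExtensionOperand);
 *     }
 */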
288 
289 typedef struct ANeuralNetworksDiagnosticCompilationInfo
290     ANeuralNetworksDiagnosticCompilationInfo;
291 
292 /**
 * Gets the ID that identifies a single session of a client interacting with
 * the NNAPI runtime.
295  *
296  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
297  * object.
298  * @return Session info id.
299  */
300 int32_t SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId(
301     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
302 
303 /**
304  * Gets NNAPI version.
305  *
306  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
307  * object.
308  * @return NNAPI version.
309  */
310 int64_t SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion(
311     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
312 
313 /**
314  * Gets the hash of the model architecture (without weights).
315  *
316  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
317  * object.
318  * @return Model hash.
319  */
320 const uint8_t* SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash(
321     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
322 
323 /**
324  * Gets the device IDs as a comma-concatenated string.
325  *
326  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
327  * object.
328  * @return Device ID.
329  */
330 const char* SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds(
331     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
332 
333 /**
334  * Gets the error code.
335  *
336  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
337  * object.
338  * @return Error code.
339  */
340 int32_t SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode(
341     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
342 
343 /**
344  * Gets the type of tensors used for inputs.
345  *
346  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
347  * object.
348  * @return Input data class.
349  */
350 ANeuralNetworksDiagnosticDataClass
351 SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass(
352     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
353 
354 /**
355  * Gets the type of tensors used for outputs.
356  *
357  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
358  * object.
359  * @return Output data class.
360  */
361 ANeuralNetworksDiagnosticDataClass
362 SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass(
363     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
364 
365 /**
366  * Gets how many nanoseconds elapsed when compiling the model.
367  *
368  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
369  * object.
370  * @return Time to compile the model in nanoseconds. UINT64_MAX indicates that
371  * timing information is not available.
372  */
373 uint64_t SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos(
374     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
375 
376 /**
377  * Is caching enabled?
378  *
379  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
380  * object.
381  * @return Whether caching is enabled.
382  */
383 bool SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled(
384     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
385 
386 /**
387  * Is control flow used?
388  *
389  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
390  * object.
391  * @return Whether control flow was used.
392  */
393 bool SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed(
394     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
395 
396 /**
397  * Are dynamic tensors used?
398  *
399  * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
400  * object.
401  * @return Whether dynamic tensors were used.
402  */
403 bool SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed(
404     const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
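
/*
 * Illustrative sketch: a compilation-finished callback that reads a few of the
 * getters above. The printf sink is a stand-in; any thread-safe logging works.
 *
 *     static void OnCompilationFinished(
 *         const void* context,
 *         const ANeuralNetworksDiagnosticCompilationInfo* info) {
 *       (void)context;
 *       int32_t sessionId =
 *           SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId(info);
 *       uint64_t nanos =
 *           SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos(
 *               info);
 *       bool caching =
 *           SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled(info);
 *       if (nanos != UINT64_MAX) {
 *         printf("session %d compiled in %llu ns (caching %s)\n", sessionId,
 *                (unsigned long long)nanos, caching ? "on" : "off");
 *       }
 *     }
 */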
405 
406 typedef struct ANeuralNetworksDiagnosticExecutionInfo
407     ANeuralNetworksDiagnosticExecutionInfo;
408 
409 /**
 * Gets the ID that identifies a single session of a client interacting with
 * the NNAPI runtime.
412  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
414  * @return Session info id.
415  */
416 int32_t SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId(
417     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
418 
419 /**
420  * Gets NNAPI version.
421  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
423  * @return NNAPI version.
424  */
425 int64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion(
426     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
427 
428 /**
429  * Gets the hash of the model architecture (without weights).
430  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
432  * @return Model hash.
433  */
434 const uint8_t* SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash(
435     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
436 
437 /**
438  * Gets the device IDs as a comma-concatenated string.
439  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
441  * @return Device ID.
442  */
443 const char* SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds(
444     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
445 
446 /**
447  * Gets the execution mode.
448  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
450  * @return Execution mode.
451  */
452 ANeuralNetworksDiagnosticExecutionMode
453 SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode(
454     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
455 
456 /**
457  * Gets the input data class.
458  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
460  * @return Input data class.
461  */
462 ANeuralNetworksDiagnosticDataClass
463 SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass(
464     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
465 
466 /**
467  * Gets the output data class.
468  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
470  * @return Output data class.
471  */
472 ANeuralNetworksDiagnosticDataClass
473 SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass(
474     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
475 
476 /**
477  * Gets the error code.
478  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
480  * @return Error code.
481  */
482 uint32_t SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode(
483     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
484 
485 /**
 * Gets the time taken to execute, as measured by the runtime, including
 * runtime/IPC overhead.
487  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
489  * @return Time taken to execute as measured by the runtime in nanoseconds.
490  * UINT64_MAX indicates that timing information is not available.
491  */
492 uint64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos(
493     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
494 
495 /**
 * Gets the time taken to execute in the driver, excluding runtime/IPC overhead.
497  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
499  * @return Time taken to execute on the driver in nanoseconds. UINT64_MAX
500  * indicates that timing information is not available.
501  */
502 uint64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos(
503     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
504 
505 /**
506  * Gets the time taken to execute on the hardware, excluding driver overhead.
507  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
509  * @return Time taken to execute on the hardware in nanoseconds. UINT64_MAX
510  * indicates that timing information is not available.
511  */
512 uint64_t
513 SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos(
514     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
515 
516 /**
517  * Is caching enabled?
518  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
520  * @return Whether caching is enabled.
521  */
522 bool SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled(
523     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
524 
525 /**
526  * Is control flow used?
527  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
529  * @return Whether control flow was used.
530  */
531 bool SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed(
532     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
533 
534 /**
535  * Are dynamic tensors used?
536  *
 * @param diagnosticExecutionInfo The NNAPI diagnostic execution info object.
538  * @return Whether dynamic tensors were used.
539  */
540 bool SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed(
541     const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
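
/*
 * Illustrative sketch: comparing the execution timing getters inside an
 * execution-finished callback. UINT64_MAX means a given timing is unavailable;
 * subtracting hardware time from runtime time approximates the combined
 * runtime/IPC and driver overhead.
 *
 *     static void OnExecutionFinished(
 *         const void* context,
 *         const ANeuralNetworksDiagnosticExecutionInfo* info) {
 *       (void)context;
 *       uint64_t runtimeNs =
 *           SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos(
 *               info);
 *       uint64_t hardwareNs =
 *           SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos(
 *               info);
 *       if (runtimeNs != UINT64_MAX && hardwareNs != UINT64_MAX) {
 *         uint64_t overheadNs = runtimeNs - hardwareNs;
 *         printf("execution overhead: %llu ns\n",
 *                (unsigned long long)overheadNs);
 *       }
 *     }
 */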
542 
543 typedef void (*ANeuralNetworksDiagnosticCompilationFinishedCallback)(
544     const void* context, const ANeuralNetworksDiagnosticCompilationInfo* info);
545 
546 typedef void (*ANeuralNetworksDiagnosticExecutionFinishedCallback)(
547     const void* context, const ANeuralNetworksDiagnosticExecutionInfo* info);
548 
549 /**
550  * Sets the callbacks to be called when compilations or executions finish.
551  *
552  * Example usage:
553  *
 * // Callback to be invoked whenever a compilation has completed.
 * void compilationCallback(
 *     const void* context,
 *     const ANeuralNetworksDiagnosticCompilationInfo* info) {
 *   // The context object can be used to store state without the use of a
 *   // global variable.
 *   ExampleLoggerObject* logger =
 *       static_cast<ExampleLoggerObject*>(const_cast<void*>(context));
 *
 *   // Calls to getters to get the details...
 *   const int32_t sessionId =
 *       SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId(info);
 *
 *   ...
 *
 *   logger->write(...);
 * }
 *
 * void executionCallback(
 *     const void* context,
 *     const ANeuralNetworksDiagnosticExecutionInfo* info) {
 *   ...
 * }
 *
 * ExampleLoggerObject exampleLoggerObject;
 * SL_ANeuralNetworksDiagnostic_registerCallbacks(
 *     &compilationCallback, &executionCallback,
 *     static_cast<void*>(&exampleLoggerObject));
578  *
579  * @param compilationCallback The compilation callback to set.
580  * @param executionCallback The execution callback to set.
581  * @param callbackContext The context to be passed to the callbacks when they
582  * are invoked. The context object may be used by multiple threads
 * simultaneously, so it must be thread-safe.
584  */
585 void SL_ANeuralNetworksDiagnostic_registerCallbacks(
586     ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback,
587     ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback,
588     void* callbackContext);
589 
590 /**
591  * Base version of NnApiSLDriverImpl with version information.
592  *
593  * NnApiSLDriverImpl is non-opaque, versioning struct to make it possible to
594  * pass its instance straight from the SL Driver to the shim registration. The
595  * glue code that loads the SL and calls the shim is non-updatable. An opaque
596  * struct would require the glue code to be updated if we would like to use
597  * newer NNAPI Feature Level.
598  *
599  * There's expectation that for M>N, NnApiSLDriverImplFL(M) is
600  * a strict superset of NnApiSLDriverImplFL(N), and NnApiSLDriverImplFL(M)* can
601  * be reinterpret_cast to NnApiSLDriverImplFL(N)* safely.
602  */
603 typedef struct NnApiSLDriverImpl {
604   /**
605    * Version of the NnApiSLDriverImpl struct. Uses {@link FeatureLevelCode}
606    * values for versioning.
607    */
608   int64_t implFeatureLevel;
609 } NnApiSLDriverImpl;
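
/*
 * Illustrative sketch of how glue code is expected to consume this struct:
 * check implFeatureLevel first, then cast to the matching feature-level type.
 * GetNnApiSlDriverImpl() is a hypothetical loader entry point used only for
 * the example; it is not declared in this header.
 *
 *     NnApiSLDriverImpl* impl = GetNnApiSlDriverImpl();  // hypothetical
 *     if (impl != NULL &&
 *         impl->implFeatureLevel >= ANEURALNETWORKS_FEATURE_LEVEL_5) {
 *       // Safe per the superset expectation described above.
 *       NnApiSLDriverImplFL5* fl5 = (NnApiSLDriverImplFL5*)impl;
 *       // ... call through fl5->ANeuralNetworks_getDeviceCount, etc. ...
 *     }
 */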
610 
611 /**
612  * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link
613  * ANEURALNETWORKS_FEATURE_LEVEL_5}.
614  *
615  * This struct must set its implFeatureLevel to {@link
616  * ANEURALNETWORKS_FEATURE_LEVEL_5}.
617  *
618  * LINT.IfChange
619  */
620 typedef struct NnApiSLDriverImplFL5 {
621   /**
   * Base type with version information. Allows a pointer of this type to be
   * cast to NnApiSLDriverImpl* with valid version information.
   * For this type, the implFeatureLevel field must always be set to {@link
   * ANEURALNETWORKS_FEATURE_LEVEL_5}.
626    */
627   NnApiSLDriverImpl base;
628 
629   /**
630    * SL Driver implementation of {@link ANeuralNetworksBurst_create}.
631    * Behavior, arguments, and outputs match NNAPI Runtime function
632    * {@link ANeuralNetworksBurst_create},
633    * at the feature level of this NnApiSLDriver struct.
634    */
635   int (*ANeuralNetworksBurst_create)(ANeuralNetworksCompilation* compilation,
636                                      ANeuralNetworksBurst** burst);
637 
638   /**
639    * SL Driver implementation of {@link ANeuralNetworksBurst_free}.
640    * Behavior, arguments, and outputs match NNAPI Runtime function
641    * {@link ANeuralNetworksBurst_free},
642    * at the feature level of this NnApiSLDriver struct.
643    */
644   void (*ANeuralNetworksBurst_free)(ANeuralNetworksBurst* burst);
645 
646   /**
647    * SL Driver implementation of {@link
648    * ANeuralNetworksCompilation_createForDevices}. Behavior, arguments, and
649    * outputs match NNAPI Runtime function
650    * {@link ANeuralNetworksCompilation_createForDevices},
651    * at the feature level of this NnApiSLDriver struct.
652    */
653   int (*ANeuralNetworksCompilation_createForDevices)(
654       ANeuralNetworksModel* model, const ANeuralNetworksDevice* const* devices,
655       uint32_t numDevices, ANeuralNetworksCompilation** compilation);
656 
657   /**
658    * SL Driver implementation of {@link ANeuralNetworksCompilation_finish}.
659    * Behavior, arguments, and outputs match NNAPI Runtime function
660    * {@link ANeuralNetworksCompilation_finish},
661    * at the feature level of this NnApiSLDriver struct.
662    */
663   int (*ANeuralNetworksCompilation_finish)(
664       ANeuralNetworksCompilation* compilation);
665 
666   /**
667    * SL Driver implementation of {@link ANeuralNetworksCompilation_free}.
668    * Behavior, arguments, and outputs match NNAPI Runtime function
669    * {@link ANeuralNetworksCompilation_free},
670    * at the feature level of this NnApiSLDriver struct.
671    */
672   void (*ANeuralNetworksCompilation_free)(
673       ANeuralNetworksCompilation* compilation);
674 
675   /**
676    * SL Driver implementation of {@link
677    * ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput}. Behavior,
678    * arguments, and outputs match NNAPI Runtime function
679    * {@link ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput},
680    * at the feature level of this NnApiSLDriver struct.
681    */
682   int (*ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput)(
683       const ANeuralNetworksCompilation* compilation, uint32_t index,
684       uint32_t* alignment);
685 
686   /**
687    * SL Driver implementation of {@link
688    * ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput}. Behavior,
689    * arguments, and outputs match NNAPI Runtime function
690    * {@link ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput},
691    * at the feature level of this NnApiSLDriver struct.
692    */
693   int (*ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput)(
694       const ANeuralNetworksCompilation* compilation, uint32_t index,
695       uint32_t* alignment);
696 
697   /**
698    * SL Driver implementation of {@link
699    * ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput}. Behavior,
700    * arguments, and outputs match NNAPI Runtime function
701    * {@link ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput},
702    * at the feature level of this NnApiSLDriver struct.
703    */
704   int (*ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput)(
705       const ANeuralNetworksCompilation* compilation, uint32_t index,
706       uint32_t* padding);
707 
708   /**
709    * SL Driver implementation of {@link
710    * ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput}. Behavior,
711    * arguments, and outputs match NNAPI Runtime function
712    * {@link ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput},
713    * at the feature level of this NnApiSLDriver struct.
714    */
715   int (*ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput)(
716       const ANeuralNetworksCompilation* compilation, uint32_t index,
717       uint32_t* padding);
718 
719   /**
720    * SL Driver implementation of {@link ANeuralNetworksCompilation_setCaching}.
721    * Behavior, arguments, and outputs match NNAPI Runtime function
722    * {@link ANeuralNetworksCompilation_setCaching},
723    * at the feature level of this NnApiSLDriver struct.
724    */
725   int (*ANeuralNetworksCompilation_setCaching)(
726       ANeuralNetworksCompilation* compilation, const char* cacheDir,
727       const uint8_t* token);
728 
729   /**
730    * SL Driver implementation of {@link
731    * ANeuralNetworksCompilation_setPreference}. Behavior, arguments, and outputs
732    * match NNAPI Runtime function
733    * {@link ANeuralNetworksCompilation_setPreference},
734    * at the feature level of this NnApiSLDriver struct.
735    */
736   int (*ANeuralNetworksCompilation_setPreference)(
737       ANeuralNetworksCompilation* compilation, int32_t preference);
738 
739   /**
740    * SL Driver implementation of {@link ANeuralNetworksCompilation_setPriority}.
741    * Behavior, arguments, and outputs match NNAPI Runtime function
742    * {@link ANeuralNetworksCompilation_setPriority},
743    * at the feature level of this NnApiSLDriver struct.
744    */
745   int (*ANeuralNetworksCompilation_setPriority)(
746       ANeuralNetworksCompilation* compilation, int priority);
747 
748   /**
749    * SL Driver implementation of {@link ANeuralNetworksCompilation_setTimeout}.
750    * Behavior, arguments, and outputs match NNAPI Runtime function
751    * {@link ANeuralNetworksCompilation_setTimeout},
752    * at the feature level of this NnApiSLDriver struct.
753    */
754   int (*ANeuralNetworksCompilation_setTimeout)(
755       ANeuralNetworksCompilation* compilation, uint64_t duration);
756 
757   /**
758    * SL Driver implementation of {@link
759    * ANeuralNetworksDevice_getExtensionSupport}. Behavior, arguments, and
760    * outputs match NNAPI Runtime function
761    * {@link ANeuralNetworksDevice_getExtensionSupport},
762    * at the feature level of this NnApiSLDriver struct.
763    */
764   int (*ANeuralNetworksDevice_getExtensionSupport)(
765       const ANeuralNetworksDevice* device, const char* extensionName,
766       bool* isExtensionSupported);
767 
768   /**
769    * SL Driver implementation of {@link ANeuralNetworksDevice_getFeatureLevel}.
770    * Behavior, arguments, and outputs match NNAPI Runtime function
771    * {@link ANeuralNetworksDevice_getFeatureLevel},
772    * at the feature level of this NnApiSLDriver struct.
773    */
774   int (*ANeuralNetworksDevice_getFeatureLevel)(
775       const ANeuralNetworksDevice* device, int64_t* featureLevel);
776 
777   /**
778    * SL Driver implementation of {@link ANeuralNetworksDevice_getName}.
779    * Behavior, arguments, and outputs match NNAPI Runtime function
780    * {@link ANeuralNetworksDevice_getName},
781    * at the feature level of this NnApiSLDriver struct.
782    */
783   int (*ANeuralNetworksDevice_getName)(const ANeuralNetworksDevice* device,
784                                        const char** name);
785 
786   /**
787    * SL Driver implementation of {@link ANeuralNetworksDevice_getType}.
788    * Behavior, arguments, and outputs match NNAPI Runtime function
789    * {@link ANeuralNetworksDevice_getType},
790    * at the feature level of this NnApiSLDriver struct.
791    */
792   int (*ANeuralNetworksDevice_getType)(const ANeuralNetworksDevice* device,
793                                        int32_t* type);
794 
795   /**
796    * SL Driver implementation of {@link ANeuralNetworksDevice_getVersion}.
797    * Behavior, arguments, and outputs match NNAPI Runtime function
798    * {@link ANeuralNetworksDevice_getVersion},
799    * at the feature level of this NnApiSLDriver struct.
800    */
801   int (*ANeuralNetworksDevice_getVersion)(const ANeuralNetworksDevice* device,
802                                           const char** version);
803 
804   /**
805    * SL Driver implementation of {@link ANeuralNetworksDevice_wait}.
806    * Behavior, arguments, and outputs match NNAPI Runtime function
807    * {@link ANeuralNetworksDevice_wait},
808    * at the feature level of this NnApiSLDriver struct.
809    */
810   int (*ANeuralNetworksDevice_wait)(const ANeuralNetworksDevice* device);
811 
812   /**
813    * SL Driver implementation of {@link
814    * ANeuralNetworksEvent_createFromSyncFenceFd}. Behavior, arguments, and
815    * outputs match NNAPI Runtime function
816    * {@link ANeuralNetworksEvent_createFromSyncFenceFd},
817    * at the feature level of this NnApiSLDriver struct.
818    */
819   int (*ANeuralNetworksEvent_createFromSyncFenceFd)(
820       int sync_fence_fd, ANeuralNetworksEvent** event);
821 
822   /**
823    * SL Driver implementation of {@link ANeuralNetworksEvent_free}.
824    * Behavior, arguments, and outputs match NNAPI Runtime function
825    * {@link ANeuralNetworksEvent_free},
826    * at the feature level of this NnApiSLDriver struct.
827    */
828   void (*ANeuralNetworksEvent_free)(ANeuralNetworksEvent* event);
829 
830   /**
831    * SL Driver implementation of {@link ANeuralNetworksEvent_getSyncFenceFd}.
832    * Behavior, arguments, and outputs match NNAPI Runtime function
833    * {@link ANeuralNetworksEvent_getSyncFenceFd},
834    * at the feature level of this NnApiSLDriver struct.
835    */
836   int (*ANeuralNetworksEvent_getSyncFenceFd)(const ANeuralNetworksEvent* event,
837                                              int* sync_fence_fd);
838 
839   /**
840    * SL Driver implementation of {@link ANeuralNetworksEvent_wait}.
841    * Behavior, arguments, and outputs match NNAPI Runtime function
842    * {@link ANeuralNetworksEvent_wait},
843    * at the feature level of this NnApiSLDriver struct.
844    */
845   int (*ANeuralNetworksEvent_wait)(ANeuralNetworksEvent* event);
846 
847   /**
848    * SL Driver implementation of {@link ANeuralNetworksExecution_burstCompute}.
849    * Behavior, arguments, and outputs match NNAPI Runtime function
850    * {@link ANeuralNetworksExecution_burstCompute},
851    * at the feature level of this NnApiSLDriver struct.
852    */
853   int (*ANeuralNetworksExecution_burstCompute)(
854       ANeuralNetworksExecution* execution, ANeuralNetworksBurst* burst);
855 
856   /**
857    * SL Driver implementation of {@link ANeuralNetworksExecution_compute}.
858    * Behavior, arguments, and outputs match NNAPI Runtime function
859    * {@link ANeuralNetworksExecution_compute},
860    * at the feature level of this NnApiSLDriver struct.
861    */
862   int (*ANeuralNetworksExecution_compute)(ANeuralNetworksExecution* execution);
863 
864   /**
865    * SL Driver implementation of {@link ANeuralNetworksExecution_create}.
866    * Behavior, arguments, and outputs match NNAPI Runtime function
867    * {@link ANeuralNetworksExecution_create},
868    * at the feature level of this NnApiSLDriver struct.
869    */
870   int (*ANeuralNetworksExecution_create)(
871       ANeuralNetworksCompilation* compilation,
872       ANeuralNetworksExecution** execution);
873 
874   /**
875    * SL Driver implementation of {@link
876    * ANeuralNetworksExecution_enableInputAndOutputPadding}. Behavior, arguments,
877    * and outputs match NNAPI Runtime function
878    * {@link ANeuralNetworksExecution_enableInputAndOutputPadding},
879    * at the feature level of this NnApiSLDriver struct.
880    */
881   int (*ANeuralNetworksExecution_enableInputAndOutputPadding)(
882       ANeuralNetworksExecution* execution, bool enable);
883 
884   /**
885    * SL Driver implementation of {@link ANeuralNetworksExecution_free}.
886    * Behavior, arguments, and outputs match NNAPI Runtime function
887    * {@link ANeuralNetworksExecution_free},
888    * at the feature level of this NnApiSLDriver struct.
889    */
890   void (*ANeuralNetworksExecution_free)(ANeuralNetworksExecution* execution);
891 
892   /**
893    * SL Driver implementation of {@link ANeuralNetworksExecution_getDuration}.
894    * Behavior, arguments, and outputs match NNAPI Runtime function
895    * {@link ANeuralNetworksExecution_getDuration},
896    * at the feature level of this NnApiSLDriver struct.
897    */
898   int (*ANeuralNetworksExecution_getDuration)(
899       const ANeuralNetworksExecution* execution, int32_t durationCode,
900       uint64_t* duration);
901 
902   /**
903    * SL Driver implementation of {@link
904    * ANeuralNetworksExecution_getOutputOperandDimensions}. Behavior, arguments,
905    * and outputs match NNAPI Runtime function
906    * {@link ANeuralNetworksExecution_getOutputOperandDimensions},
907    * at the feature level of this NnApiSLDriver struct.
908    */
909   int (*ANeuralNetworksExecution_getOutputOperandDimensions)(
910       ANeuralNetworksExecution* execution, int32_t index, uint32_t* dimensions);
911 
912   /**
913    * SL Driver implementation of {@link
914    * ANeuralNetworksExecution_getOutputOperandRank}. Behavior, arguments, and
915    * outputs match NNAPI Runtime function
916    * {@link ANeuralNetworksExecution_getOutputOperandRank},
917    * at the feature level of this NnApiSLDriver struct.
918    */
919   int (*ANeuralNetworksExecution_getOutputOperandRank)(
920       ANeuralNetworksExecution* execution, int32_t index, uint32_t* rank);
921 
922   /**
923    * SL Driver implementation of {@link ANeuralNetworksExecution_setInput}.
924    * Behavior, arguments, and outputs match NNAPI Runtime function
925    * {@link ANeuralNetworksExecution_setInput},
926    * at the feature level of this NnApiSLDriver struct.
927    */
928   int (*ANeuralNetworksExecution_setInput)(
929       ANeuralNetworksExecution* execution, int32_t index,
930       const ANeuralNetworksOperandType* type, const void* buffer,
931       size_t length);
932 
933   /**
934    * SL Driver implementation of {@link
935    * ANeuralNetworksExecution_setInputFromMemory}. Behavior, arguments, and
936    * outputs match NNAPI Runtime function
937    * {@link ANeuralNetworksExecution_setInputFromMemory},
938    * at the feature level of this NnApiSLDriver struct.
939    */
940   int (*ANeuralNetworksExecution_setInputFromMemory)(
941       ANeuralNetworksExecution* execution, int32_t index,
942       const ANeuralNetworksOperandType* type,
943       const ANeuralNetworksMemory* memory, size_t offset, size_t length);
944 
945   /**
946    * SL Driver implementation of {@link
947    * ANeuralNetworksExecution_setLoopTimeout}. Behavior, arguments, and outputs
948    * match NNAPI Runtime function
949    * {@link ANeuralNetworksExecution_setLoopTimeout},
950    * at the feature level of this NnApiSLDriver struct.
951    */
952   int (*ANeuralNetworksExecution_setLoopTimeout)(
953       ANeuralNetworksExecution* execution, uint64_t duration);
954 
955   /**
956    * SL Driver implementation of {@link
957    * ANeuralNetworksExecution_setMeasureTiming}. Behavior, arguments, and
958    * outputs match NNAPI Runtime function
959    * {@link ANeuralNetworksExecution_setMeasureTiming},
960    * at the feature level of this NnApiSLDriver struct.
961    */
962   int (*ANeuralNetworksExecution_setMeasureTiming)(
963       ANeuralNetworksExecution* execution, bool measure);
964 
965   /**
966    * SL Driver implementation of {@link ANeuralNetworksExecution_setOutput}.
967    * Behavior, arguments, and outputs match NNAPI Runtime function
968    * {@link ANeuralNetworksExecution_setOutput},
969    * at the feature level of this NnApiSLDriver struct.
970    */
971   int (*ANeuralNetworksExecution_setOutput)(
972       ANeuralNetworksExecution* execution, int32_t index,
973       const ANeuralNetworksOperandType* type, void* buffer, size_t length);
974 
975   /**
976    * SL Driver implementation of {@link
977    * ANeuralNetworksExecution_setOutputFromMemory}. Behavior, arguments, and
978    * outputs match NNAPI Runtime function
979    * {@link ANeuralNetworksExecution_setOutputFromMemory},
980    * at the feature level of this NnApiSLDriver struct.
981    */
982   int (*ANeuralNetworksExecution_setOutputFromMemory)(
983       ANeuralNetworksExecution* execution, int32_t index,
984       const ANeuralNetworksOperandType* type,
985       const ANeuralNetworksMemory* memory, size_t offset, size_t length);
986 
987   /**
988    * SL Driver implementation of {@link ANeuralNetworksExecution_setReusable}.
989    * Behavior, arguments, and outputs match NNAPI Runtime function
990    * {@link ANeuralNetworksExecution_setReusable},
991    * at the feature level of this NnApiSLDriver struct.
992    */
993   int (*ANeuralNetworksExecution_setReusable)(
994       ANeuralNetworksExecution* execution, bool reusable);
995 
996   /**
997    * SL Driver implementation of {@link ANeuralNetworksExecution_setTimeout}.
998    * Behavior, arguments, and outputs match NNAPI Runtime function
999    * {@link ANeuralNetworksExecution_setTimeout},
1000    * at the feature level of this NnApiSLDriver struct.
1001    */
1002   int (*ANeuralNetworksExecution_setTimeout)(
1003       ANeuralNetworksExecution* execution, uint64_t duration);
1004 
1005   /**
1006    * SL Driver implementation of {@link
1007    * ANeuralNetworksExecution_startComputeWithDependencies}. Behavior,
1008    * arguments, and outputs match NNAPI Runtime function
1009    * {@link ANeuralNetworksExecution_startComputeWithDependencies},
1010    * at the feature level of this NnApiSLDriver struct.
1011    */
1012   int (*ANeuralNetworksExecution_startComputeWithDependencies)(
1013       ANeuralNetworksExecution* execution,
1014       const ANeuralNetworksEvent* const* dependencies,
1015       uint32_t num_dependencies, uint64_t duration,
1016       ANeuralNetworksEvent** event);
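
  /*
   * Illustrative sketch (comment only): a fenced execution issued through this
   * table, waiting on the returned event. `sl` is assumed to point at a
   * populated NnApiSLDriverImplFL5 and `execution` at a fully configured
   * execution.
   *
   *     ANeuralNetworksEvent* event = NULL;
   *     int status = sl->ANeuralNetworksExecution_startComputeWithDependencies(
   *         execution, NULL, 0, 0, &event);  // no dependencies, no deadline
   *     if (status == ANEURALNETWORKS_NO_ERROR) {
   *       sl->ANeuralNetworksEvent_wait(event);
   *       sl->ANeuralNetworksEvent_free(event);
   *     }
   */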
1017 
1018   /**
1019    * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_addInputRole}.
1020    * Behavior, arguments, and outputs match NNAPI Runtime function
1021    * {@link ANeuralNetworksMemoryDesc_addInputRole},
1022    * at the feature level of this NnApiSLDriver struct.
1023    */
1024   int (*ANeuralNetworksMemoryDesc_addInputRole)(
1025       ANeuralNetworksMemoryDesc* desc,
1026       const ANeuralNetworksCompilation* compilation, uint32_t index,
1027       float frequency);
1028 
1029   /**
1030    * SL Driver implementation of {@link
1031    * ANeuralNetworksMemoryDesc_addOutputRole}. Behavior, arguments, and outputs
1032    * match NNAPI Runtime function
1033    * {@link ANeuralNetworksMemoryDesc_addOutputRole},
1034    * at the feature level of this NnApiSLDriver struct.
1035    */
1036   int (*ANeuralNetworksMemoryDesc_addOutputRole)(
1037       ANeuralNetworksMemoryDesc* desc,
1038       const ANeuralNetworksCompilation* compilation, uint32_t index,
1039       float frequency);
1040 
1041   /**
1042    * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_create}.
1043    * Behavior, arguments, and outputs match NNAPI Runtime function
1044    * {@link ANeuralNetworksMemoryDesc_create},
1045    * at the feature level of this NnApiSLDriver struct.
1046    */
1047   int (*ANeuralNetworksMemoryDesc_create)(ANeuralNetworksMemoryDesc** desc);
1048 
1049   /**
1050    * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_finish}.
1051    * Behavior, arguments, and outputs match NNAPI Runtime function
1052    * {@link ANeuralNetworksMemoryDesc_finish},
1053    * at the feature level of this NnApiSLDriver struct.
1054    */
1055   int (*ANeuralNetworksMemoryDesc_finish)(ANeuralNetworksMemoryDesc* desc);
1056 
1057   /**
1058    * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_free}.
1059    * Behavior, arguments, and outputs match NNAPI Runtime function
1060    * {@link ANeuralNetworksMemoryDesc_free},
1061    * at the feature level of this NnApiSLDriver struct.
1062    */
1063   void (*ANeuralNetworksMemoryDesc_free)(ANeuralNetworksMemoryDesc* desc);
1064 
1065   /**
1066    * SL Driver implementation of {@link
1067    * ANeuralNetworksMemoryDesc_setDimensions}. Behavior, arguments, and outputs
1068    * match NNAPI Runtime function
1069    * {@link ANeuralNetworksMemoryDesc_setDimensions},
1070    * at the feature level of this NnApiSLDriver struct.
1071    */
1072   int (*ANeuralNetworksMemoryDesc_setDimensions)(
1073       ANeuralNetworksMemoryDesc* desc, uint32_t rank,
1074       const uint32_t* dimensions);
1075 
1076   /**
1077    * SL Driver implementation of {@link ANeuralNetworksMemory_copy}.
1078    * Behavior, arguments, and outputs match NNAPI Runtime function
1079    * {@link ANeuralNetworksMemory_copy},
1080    * at the feature level of this NnApiSLDriver struct.
1081    */
1082   int (*ANeuralNetworksMemory_copy)(const ANeuralNetworksMemory* src,
1083                                     const ANeuralNetworksMemory* dst);
1084 
1085   /**
1086    * SL Driver implementation of {@link
1087    * ANeuralNetworksMemory_createFromAHardwareBuffer}. Behavior, arguments, and
1088    * outputs match NNAPI Runtime function
1089    * {@link ANeuralNetworksMemory_createFromAHardwareBuffer},
1090    * at the feature level of this NnApiSLDriver struct.
1091    */
1092   int (*ANeuralNetworksMemory_createFromAHardwareBuffer)(
1093       const AHardwareBuffer* ahwb, ANeuralNetworksMemory** memory);
1094 
1095   /**
1096    * SL Driver implementation of {@link ANeuralNetworksMemory_createFromDesc}.
1097    * Behavior, arguments, and outputs match NNAPI Runtime function
1098    * {@link ANeuralNetworksMemory_createFromDesc},
1099    * at the feature level of this NnApiSLDriver struct.
1100    */
1101   int (*ANeuralNetworksMemory_createFromDesc)(
1102       const ANeuralNetworksMemoryDesc* desc, ANeuralNetworksMemory** memory);
1103 
1104   /**
1105    * SL Driver implementation of {@link ANeuralNetworksMemory_createFromFd}.
1106    * Behavior, arguments, and outputs match NNAPI Runtime function
1107    * {@link ANeuralNetworksMemory_createFromFd},
1108    * at the feature level of this NnApiSLDriver struct.
1109    */
1110   int (*ANeuralNetworksMemory_createFromFd)(size_t size, int protect, int fd,
1111                                             size_t offset,
1112                                             ANeuralNetworksMemory** memory);
1113 
1114   /**
1115    * SL Driver implementation of {@link ANeuralNetworksMemory_free}.
1116    * Behavior, arguments, and outputs match NNAPI Runtime function
1117    * {@link ANeuralNetworksMemory_free},
1118    * at the feature level of this NnApiSLDriver struct.
1119    */
1120   void (*ANeuralNetworksMemory_free)(ANeuralNetworksMemory* memory);
1121 
1122   /**
1123    * SL Driver implementation of {@link ANeuralNetworksModel_addOperand}.
1124    * Behavior, arguments, and outputs match NNAPI Runtime function
1125    * {@link ANeuralNetworksModel_addOperand},
1126    * at the feature level of this NnApiSLDriver struct.
1127    */
1128   int (*ANeuralNetworksModel_addOperand)(
1129       ANeuralNetworksModel* model, const ANeuralNetworksOperandType* type);
1130 
1131   /**
1132    * SL Driver implementation of {@link ANeuralNetworksModel_addOperation}.
1133    * Behavior, arguments, and outputs match NNAPI Runtime function
1134    * {@link ANeuralNetworksModel_addOperation},
1135    * at the feature level of this NnApiSLDriver struct.
1136    */
1137   int (*ANeuralNetworksModel_addOperation)(ANeuralNetworksModel* model,
1138                                            ANeuralNetworksOperationType type,
1139                                            uint32_t inputCount,
1140                                            const uint32_t* inputs,
1141                                            uint32_t outputCount,
1142                                            const uint32_t* outputs);
1143 
1144   /**
1145    * SL Driver implementation of {@link ANeuralNetworksModel_create}.
1146    * Behavior, arguments, and outputs match NNAPI Runtime function
1147    * {@link ANeuralNetworksModel_create},
1148    * at the feature level of this NnApiSLDriver struct.
1149    */
1150   int (*ANeuralNetworksModel_create)(ANeuralNetworksModel** model);
1151 
1152   /**
1153    * SL Driver implementation of {@link ANeuralNetworksModel_finish}.
1154    * Behavior, arguments, and outputs match NNAPI Runtime function
1155    * {@link ANeuralNetworksModel_finish},
1156    * at the feature level of this NnApiSLDriver struct.
1157    */
1158   int (*ANeuralNetworksModel_finish)(ANeuralNetworksModel* model);
1159 
1160   /**
1161    * SL Driver implementation of {@link ANeuralNetworksModel_free}.
1162    * Behavior, arguments, and outputs match NNAPI Runtime function
1163    * {@link ANeuralNetworksModel_free},
1164    * at the feature level of this NnApiSLDriver struct.
1165    */
1166   void (*ANeuralNetworksModel_free)(ANeuralNetworksModel* model);
1167 
1168   /**
1169    * SL Driver implementation of {@link
1170    * ANeuralNetworksModel_getExtensionOperandType}. Behavior, arguments, and
1171    * outputs match NNAPI Runtime function
1172    * {@link ANeuralNetworksModel_getExtensionOperandType},
1173    * at the feature level of this NnApiSLDriver struct.
1174    */
1175   int (*ANeuralNetworksModel_getExtensionOperandType)(
1176       ANeuralNetworksModel* model, const char* extensionName,
1177       uint16_t operandCodeWithinExtension, int32_t* type);
1178 
1179   /**
1180    * SL Driver implementation of {@link
1181    * ANeuralNetworksModel_getExtensionOperationType}. Behavior, arguments, and
1182    * outputs match NNAPI Runtime function
1183    * {@link ANeuralNetworksModel_getExtensionOperationType},
1184    * at the feature level of this NnApiSLDriver struct.
1185    */
1186   int (*ANeuralNetworksModel_getExtensionOperationType)(
1187       ANeuralNetworksModel* model, const char* extensionName,
1188       uint16_t operationCodeWithinExtension,
1189       ANeuralNetworksOperationType* type);
1190 
1191   /**
1192    * SL Driver implementation of {@link
1193    * ANeuralNetworksModel_getSupportedOperationsForDevices}. Behavior,
1194    * arguments, and outputs match NNAPI Runtime function
1195    * {@link ANeuralNetworksModel_getSupportedOperationsForDevices},
1196    * at the feature level of this NnApiSLDriver struct.
1197    */
1198   int (*ANeuralNetworksModel_getSupportedOperationsForDevices)(
1199       const ANeuralNetworksModel* model,
1200       const ANeuralNetworksDevice* const* devices, uint32_t numDevices,
1201       bool* supportedOps);
1202 
1203   /**
1204    * SL Driver implementation of {@link
1205    * ANeuralNetworksModel_identifyInputsAndOutputs}. Behavior, arguments, and
1206    * outputs match NNAPI Runtime function
1207    * {@link ANeuralNetworksModel_identifyInputsAndOutputs},
1208    * at the feature level of this NnApiSLDriver struct.
1209    */
1210   int (*ANeuralNetworksModel_identifyInputsAndOutputs)(
1211       ANeuralNetworksModel* model, uint32_t inputCount, const uint32_t* inputs,
1212       uint32_t outputCount, const uint32_t* outputs);
1213 
1214   /**
1215    * SL Driver implementation of {@link
1216    * ANeuralNetworksModel_relaxComputationFloat32toFloat16}. Behavior,
1217    * arguments, and outputs match NNAPI Runtime function
1218    * {@link ANeuralNetworksModel_relaxComputationFloat32toFloat16},
1219    * at the feature level of this NnApiSLDriver struct.
1220    */
1221   int (*ANeuralNetworksModel_relaxComputationFloat32toFloat16)(
1222       ANeuralNetworksModel* model, bool allow);
1223 
1224   /**
1225    * SL Driver implementation of {@link
1226    * ANeuralNetworksModel_setOperandExtensionData}. Behavior, arguments, and
1227    * outputs match NNAPI Runtime function
1228    * {@link ANeuralNetworksModel_setOperandExtensionData},
1229    * at the feature level of this NnApiSLDriver struct.
1230    */
1231   int (*ANeuralNetworksModel_setOperandExtensionData)(
1232       ANeuralNetworksModel* model, int32_t index, const void* data,
1233       size_t length);
1234 
1235   /**
1236    * SL Driver implementation of {@link
1237    * ANeuralNetworksModel_setOperandSymmPerChannelQuantParams}. Behavior,
1238    * arguments, and outputs match NNAPI Runtime function
1239    * {@link ANeuralNetworksModel_setOperandSymmPerChannelQuantParams},
1240    * at the feature level of this NnApiSLDriver struct.
1241    */
1242   int (*ANeuralNetworksModel_setOperandSymmPerChannelQuantParams)(
1243       ANeuralNetworksModel* model, int32_t index,
1244       const ANeuralNetworksSymmPerChannelQuantParams* channelQuant);
1245 
1246   /**
1247    * SL Driver implementation of {@link ANeuralNetworksModel_setOperandValue}.
1248    * Behavior, arguments, and outputs match NNAPI Runtime function
1249    * {@link ANeuralNetworksModel_setOperandValue},
1250    * at the feature level of this NnApiSLDriver struct.
1251    */
1252   int (*ANeuralNetworksModel_setOperandValue)(ANeuralNetworksModel* model,
1253                                               int32_t index, const void* buffer,
1254                                               size_t length);
1255 
1256   /**
1257    * SL Driver implementation of {@link
1258    * ANeuralNetworksModel_setOperandValueFromMemory}. Behavior, arguments, and
1259    * outputs match NNAPI Runtime function
1260    * {@link ANeuralNetworksModel_setOperandValueFromMemory},
1261    * at the feature level of this NnApiSLDriver struct.
1262    */
1263   int (*ANeuralNetworksModel_setOperandValueFromMemory)(
1264       ANeuralNetworksModel* model, int32_t index,
1265       const ANeuralNetworksMemory* memory, size_t offset, size_t length);
1266 
1267   /**
1268    * SL Driver implementation of {@link
1269    * ANeuralNetworksModel_setOperandValueFromModel}. Behavior, arguments, and
1270    * outputs match NNAPI Runtime function
1271    * {@link ANeuralNetworksModel_setOperandValueFromModel},
1272    * at the feature level of this NnApiSLDriver struct.
1273    */
1274   int (*ANeuralNetworksModel_setOperandValueFromModel)(
1275       ANeuralNetworksModel* model, int32_t index,
1276       const ANeuralNetworksModel* value);
1277 
1278   /**
1279    * SL Driver implementation of {@link ANeuralNetworks_getDefaultLoopTimeout}.
1280    * Behavior, arguments, and outputs match NNAPI Runtime function
1281    * {@link ANeuralNetworks_getDefaultLoopTimeout},
1282    * at the feature level of this NnApiSLDriver struct.
1283    */
1284   uint64_t (*ANeuralNetworks_getDefaultLoopTimeout)();
1285 
1286   /**
1287    * SL Driver implementation of {@link ANeuralNetworks_getDevice}.
1288    * Behavior, arguments, and outputs match NNAPI Runtime function
1289    * {@link ANeuralNetworks_getDevice},
1290    * at the feature level of this NnApiSLDriver struct.
1291    */
1292   int (*ANeuralNetworks_getDevice)(uint32_t devIndex,
1293                                    ANeuralNetworksDevice** device);
1294 
1295   /**
1296    * SL Driver implementation of {@link ANeuralNetworks_getDeviceCount}.
1297    * Behavior, arguments, and outputs match NNAPI Runtime function
1298    * {@link ANeuralNetworks_getDeviceCount},
1299    * at the feature level of this NnApiSLDriver struct.
1300    */
1301   int (*ANeuralNetworks_getDeviceCount)(uint32_t* numDevices);
1302 
1303   /**
1304    * SL Driver implementation of {@link ANeuralNetworks_getMaximumLoopTimeout}.
1305    * Behavior, arguments, and outputs match NNAPI Runtime function
1306    * {@link ANeuralNetworks_getMaximumLoopTimeout},
1307    * at the feature level of this NnApiSLDriver struct.
1308    */
1309   uint64_t (*ANeuralNetworks_getMaximumLoopTimeout)();
1310 
1311   /**
1312    * SL Driver implementation of {@link ANeuralNetworks_getRuntimeFeatureLevel}.
1313    * Behavior, arguments, and outputs match NNAPI Runtime function
1314    * {@link ANeuralNetworks_getRuntimeFeatureLevel},
1315    * at the feature level of this NnApiSLDriver struct.
1316    */
1317   int64_t (*ANeuralNetworks_getRuntimeFeatureLevel)();
1318 
1319   /**
1320    * SL Driver implementation of a function similar to
1321    * {@link ANeuralNetworksCompilation_setCaching} that takes file descriptors
1322    * instead of a cache directory.
1323    * Behavior and outputs match NNAPI Runtime function
1324    * {@link ANeuralNetworksCompilation_setCaching},
1325    * at the feature level of this NnApiSLDriver struct.
1326    */
1327   int (*SL_ANeuralNetworksCompilation_setCachingFromFds)(
1328       ANeuralNetworksCompilation* compilation, const int* modelCacheFds,
1329       const uint32_t numModelCacheFiles, const int* dataCacheFds,
1330       const uint32_t numDataCacheFiles, const uint8_t* token);
1331 
1332   /**
1333    * SL Driver implementation of {@link
1334    * SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded}. Behavior, arguments,
1335    * and outputs match NNAPI Runtime function
1336    * {@link SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded},
1337    * at the feature level of this NnApiSLDriver struct.
1338    */
1339   int (*SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded)(
1340       const ANeuralNetworksDevice* device, uint32_t* numModelCacheFiles,
1341       uint32_t* numDataCacheFiles);
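
  /*
   * Non-normative usage sketch (not part of the ABI): a caller holding a
   * pointer `sl` to this NnApiSLDriverImplFL5 struct might size the file
   * descriptor arrays passed to SL_ANeuralNetworksCompilation_setCachingFromFds
   * from the counts reported by
   * SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded. The variables `sl`,
   * `device`, `compilation` and `token` (a caching token with the same
   * semantics as ANeuralNetworksCompilation_setCaching) are assumed to be
   * provided by the caller, and open_cache_fd() is a hypothetical helper that
   * opens one cache file per index.
   *
   *   uint32_t numModelCache = 0, numDataCache = 0;
   *   int status = sl->SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded(
   *       device, &numModelCache, &numDataCache);
   *   if (status == ANEURALNETWORKS_NO_ERROR &&
   *       numModelCache <= 8 && numDataCache <= 8) {
   *     int modelFds[8];
   *     int dataFds[8];
   *     // open_cache_fd() is a caller-provided helper in this sketch.
   *     for (uint32_t i = 0; i < numModelCache; ++i)
   *       modelFds[i] = open_cache_fd(i);
   *     for (uint32_t i = 0; i < numDataCache; ++i)
   *       dataFds[i] = open_cache_fd(numModelCache + i);
   *     status = sl->SL_ANeuralNetworksCompilation_setCachingFromFds(
   *         compilation, modelFds, numModelCache, dataFds, numDataCache, token);
   *   }
   */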
1342 
1343   /**
1344    * SL Driver implementation of {@link
1345    * SL_ANeuralNetworksDevice_getPerformanceInfo}. Behavior, arguments, and
1346    * outputs match NNAPI Runtime function
1347    * {@link SL_ANeuralNetworksDevice_getPerformanceInfo},
1348    * at the feature level of this NnApiSLDriver struct.
1349    */
1350   int (*SL_ANeuralNetworksDevice_getPerformanceInfo)(
1351       const ANeuralNetworksDevice* device, int32_t performanceInfoKind,
1352       SL_ANeuralNetworksPerformanceInfo* performanceInfo);
1353 
1354   /**
1355    * SL Driver implementation of {@link
1356    * SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo}. Behavior,
1357    * arguments, and outputs match NNAPI Runtime function
1358    * {@link SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo},
1359    * at the feature level of this NnApiSLDriver struct.
1360    */
1361   int (*SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo)(
1362       const ANeuralNetworksDevice* device, void* context,
1363       void (*callback)(SL_ANeuralNetworksOperandPerformanceInfo, void*));
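
  /*
   * Non-normative usage sketch (not part of the ABI): the `context` pointer is
   * passed back verbatim to `callback` once per reported operand type, which
   * makes it a convenient place to accumulate results. `sl` and `device` are
   * assumed to be provided by the caller; PerfAccumulator is illustrative only.
   *
   *   struct PerfAccumulator { uint32_t count; };
   *
   *   static void CountOperandPerf(SL_ANeuralNetworksOperandPerformanceInfo info,
   *                                void* context) {
   *     // info.operandType and info.performanceInfo are available here.
   *     (void)info;
   *     ((struct PerfAccumulator*)context)->count++;
   *   }
   *
   *   struct PerfAccumulator acc = {0};
   *   sl->SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo(
   *       device, &acc, CountOperandPerf);
   */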
1364 
1365   /**
1366    * SL Driver implementation of {@link
1367    * SL_ANeuralNetworksDevice_getVendorExtensionCount}. Behavior, arguments, and
1368    * outputs match NNAPI Runtime function
1369    * {@link SL_ANeuralNetworksDevice_getVendorExtensionCount},
1370    * at the feature level of this NnApiSLDriver struct.
1371    */
1372   int (*SL_ANeuralNetworksDevice_getVendorExtensionCount)(
1373       const ANeuralNetworksDevice* device, uint32_t* vendorExtensionCount);
1374 
1375   /**
1376    * SL Driver implementation of {@link
1377    * SL_ANeuralNetworksDevice_getVendorExtensionName}. Behavior, arguments, and
1378    * outputs match NNAPI Runtime function
1379    * {@link SL_ANeuralNetworksDevice_getVendorExtensionName},
1380    * at the feature level of this NnApiSLDriver struct.
1381    */
1382   int (*SL_ANeuralNetworksDevice_getVendorExtensionName)(
1383       const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex,
1384       const char** extensionName);
1385 
1386   /**
1387    * SL Driver implementation of {@link
1388    * SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}.
1389    * Behavior, arguments, and outputs match NNAPI Runtime function
1390    * {@link
1391    * SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}, at
1392    * the feature level of this NnApiSLDriver struct.
1393    */
1394   int (*SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation)(
1395       const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex,
1396       void* context,
1397       void (*callback)(SL_ANeuralNetworksExtensionOperandTypeInformation,
1398                        void*));
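
  /*
   * Non-normative usage sketch (not part of the ABI): vendor extensions are
   * addressed by index, so a caller typically queries the count first, then
   * walks each extension's name and operand type information. `sl` and `device`
   * are assumed to be provided by the caller.
   *
   *   static void PrintExtensionOperand(
   *       SL_ANeuralNetworksExtensionOperandTypeInformation info, void* context) {
   *     (void)context;
   *     printf("  operand type %u, byteSize %u, isTensor %d\n",
   *            (unsigned)info.type, (unsigned)info.byteSize, (int)info.isTensor);
   *   }
   *
   *   uint32_t extensionCount = 0;
   *   if (sl->SL_ANeuralNetworksDevice_getVendorExtensionCount(
   *           device, &extensionCount) == ANEURALNETWORKS_NO_ERROR) {
   *     for (uint32_t i = 0; i < extensionCount; ++i) {
   *       const char* name = NULL;
   *       sl->SL_ANeuralNetworksDevice_getVendorExtensionName(device, i, &name);
   *       printf("extension %u: %s\n", (unsigned)i, name);
   *       sl->SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation(
   *           device, i, NULL, PrintExtensionOperand);
   *     }
   *   }
   */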
1399 
1400   /**
1401    * SL Driver implementation of {@link
1402    * SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}. Behavior,
1403    * arguments, and outputs match NNAPI Runtime function {@link
1404    * SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}, at the feature
1405    * level of this NnApiSLDriver struct.
1406    */
1407   int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId)(
1408       const ANeuralNetworksDiagnosticCompilationInfo*
1409           diagnosticCompilationInfo);
1410 
1411   /**
1412    * SL Driver implementation of {@link
1413    * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}. Behavior,
1414    * arguments, and outputs match NNAPI Runtime function {@link
1415    * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}, at the
1416    * feature level of this NnApiSLDriver struct.
1417    */
1418   int64_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion)(
1419       const ANeuralNetworksDiagnosticCompilationInfo*
1420           diagnosticCompilationInfo);
1421 
1422   /**
1423    * SL Driver implementation of {@link
1424    * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}. Behavior,
1425    * arguments, and outputs match NNAPI Runtime function {@link
1426    * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}, at the
1427    * feature level of this NnApiSLDriver struct.
1428    */
1429   const uint8_t* (
1430       *SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash)(
1431       const ANeuralNetworksDiagnosticCompilationInfo*
1432           diagnosticCompilationInfo);
1433 
1434   /**
1435    * SL Driver implementation of {@link
1436    * SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}. Behavior,
1437    * arguments, and outputs match NNAPI Runtime function {@link
1438    * SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}, at the feature
1439    * level of this NnApiSLDriver struct.
1440    */
1441   const char* (*SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds)(
1442       const ANeuralNetworksDiagnosticCompilationInfo*
1443           diagnosticCompilationInfo);
1444 
1445   /**
1446    * SL Driver implementation of {@link
1447    * SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}. Behavior,
1448    * arguments, and outputs match NNAPI Runtime function {@link
1449    * SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}, at the feature
1450    * level of this NnApiSLDriver struct.
1451    */
1452   int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode)(
1453       const ANeuralNetworksDiagnosticCompilationInfo*
1454           diagnosticCompilationInfo);
1455 
1456   /**
1457    * SL Driver implementation of {@link
1458    * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}. Behavior,
1459    * arguments, and outputs match NNAPI Runtime function {@link
1460    * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}, at the
1461    * feature level of this NnApiSLDriver struct.
1462    */
1463   ANeuralNetworksDiagnosticDataClass (
1464       *SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass)(
1465       const ANeuralNetworksDiagnosticCompilationInfo*
1466           diagnosticCompilationInfo);
1467 
1468   /**
1469    * SL Driver implementation of {@link
1470    * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}. Behavior,
1471    * arguments, and outputs match NNAPI Runtime function {@link
1472    * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}, at the
1473    * feature level of this NnApiSLDriver struct.
1474    */
1475   ANeuralNetworksDiagnosticDataClass (
1476       *SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass)(
1477       const ANeuralNetworksDiagnosticCompilationInfo*
1478           diagnosticCompilationInfo);
1479 
1480   /**
1481    * SL Driver implementation of {@link
1482    * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}.
1483    * Behavior, arguments, and outputs match NNAPI Runtime function {@link
1484    * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}, at
1485    * the feature level of this NnApiSLDriver struct.
1486    */
1487   uint64_t (
1488       *SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos)(
1489       const ANeuralNetworksDiagnosticCompilationInfo*
1490           diagnosticCompilationInfo);
1491 
1492   /**
1493    * SL Driver implementation of {@link
1494    * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}. Behavior,
1495    * arguments, and outputs match NNAPI Runtime function {@link
1496    * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}, at the
1497    * feature level of this NnApiSLDriver struct.
1498    */
1499   bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled)(
1500       const ANeuralNetworksDiagnosticCompilationInfo*
1501           diagnosticCompilationInfo);
1502 
1503   /**
1504    * SL Driver implementation of {@link
1505    * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}. Behavior,
1506    * arguments, and outputs match NNAPI Runtime function {@link
1507    * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}, at the
1508    * feature level of this NnApiSLDriver struct.
1509    */
1510   bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed)(
1511       const ANeuralNetworksDiagnosticCompilationInfo*
1512           diagnosticCompilationInfo);
1513 
1514   /**
1515    * SL Driver implementation of {@link
1516    * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}.
1517    * Behavior, arguments, and outputs match NNAPI Runtime function {@link
1518    * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}, at the
1519    * feature level of this NnApiSLDriver struct.
1520    */
1521   bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed)(
1522       const ANeuralNetworksDiagnosticCompilationInfo*
1523           diagnosticCompilationInfo);
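
  /*
   * Non-normative usage sketch (not part of the ABI): the compilation-info
   * getters above are typically called from a compilation-finished diagnostic
   * callback (see SL_ANeuralNetworksDiagnostic_registerCallbacks below) to log
   * one finished compilation. `sl` is assumed to point to this struct and to
   * have been captured by the caller.
   *
   *   static void LogCompilationDiagnostics(
   *       const NnApiSLDriverImplFL5* sl,
   *       const ANeuralNetworksDiagnosticCompilationInfo* info) {
   *     printf("session %d: nnapi %lld, error %d, %llu ns, caching=%d\n",
   *            (int)sl->SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId(info),
   *            (long long)sl->SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion(info),
   *            (int)sl->SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode(info),
   *            (unsigned long long)
   *                sl->SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos(info),
   *            (int)sl->SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled(info));
   *   }
   */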
1524 
1525   /**
1526    * SL Driver implementation of {@link
1527    * SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}. Behavior,
1528    * arguments, and outputs match NNAPI Runtime function {@link
1529    * SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}, at the feature
1530    * level of this NnApiSLDriver struct.
1531    */
1532   int32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId)(
1533       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1534 
1535   /**
1536    * SL Driver implementation of {@link
1537    * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}. Behavior,
1538    * arguments, and outputs match NNAPI Runtime function {@link
1539    * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}, at the feature
1540    * level of this NnApiSLDriver struct.
1541    */
1542   int64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion)(
1543       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1544 
1545   /**
1546    * SL Driver implementation of {@link
1547    * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}. Behavior,
1548    * arguments, and outputs match NNAPI Runtime function {@link
1549    * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}, at the feature
1550    * level of this NnApiSLDriver struct.
1551    */
1552   const uint8_t* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash)(
1553       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1554 
1555   /**
1556    * SL Driver implementation of {@link
1557    * SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}. Behavior,
1558    * arguments, and outputs match NNAPI Runtime function {@link
1559    * SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}, at the feature
1560    * level of this NnApiSLDriver struct.
1561    */
1562   const char* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds)(
1563       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1564 
1565   /**
1566    * SL Driver implementation of {@link
1567    * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}. Behavior,
1568    * arguments, and outputs match NNAPI Runtime function {@link
1569    * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}, at the feature
1570    * level of this NnApiSLDriver struct.
1571    */
1572   ANeuralNetworksDiagnosticExecutionMode (
1573       *SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode)(
1574       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1575 
1576   /**
1577    * SL Driver implementation of {@link
1578    * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}. Behavior,
1579    * arguments, and outputs match NNAPI Runtime function {@link
1580    * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}, at the
1581    * feature level of this NnApiSLDriver struct.
1582    */
1583   ANeuralNetworksDiagnosticDataClass (
1584       *SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass)(
1585       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1586 
1587   /**
1588    * SL Driver implementation of {@link
1589    * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}. Behavior,
1590    * arguments, and outputs match NNAPI Runtime function {@link
1591    * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}, at the
1592    * feature level of this NnApiSLDriver struct.
1593    */
1594   ANeuralNetworksDiagnosticDataClass (
1595       *SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass)(
1596       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1597 
1598   /**
1599    * SL Driver implementation of {@link
1600    * SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}. Behavior,
1601    * arguments, and outputs match NNAPI Runtime function {@link
1602    * SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}, at the feature
1603    * level of this NnApiSLDriver struct.
1604    */
1605   uint32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode)(
1606       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1607 
1608   /**
1609    * SL Driver implementation of {@link
1610    * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}.
1611    * Behavior, arguments, and outputs match NNAPI Runtime function {@link
1612    * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}, at
1613    * the feature level of this NnApiSLDriver struct.
1614    */
1615   uint64_t (
1616       *SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos)(
1617       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1618 
1619   /**
1620    * SL Driver implementation of {@link
1621    * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}.
1622    * Behavior, arguments, and outputs match NNAPI Runtime function {@link
1623    * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}, at
1624    * the feature level of this NnApiSLDriver struct.
1625    */
1626   uint64_t (
1627       *SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos)(
1628       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1629 
1630   /**
1631    * SL Driver implementation of {@link
1632    * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos}.
1633    * Behavior, arguments, and outputs match NNAPI Runtime function {@link
1634    * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos},
1635    * at the feature level of this NnApiSLDriver struct.
1636    */
1637   uint64_t (
1638       *SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos)(
1639       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1640 
1641   /**
1642    * SL Driver implementation of {@link
1643    * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}. Behavior,
1644    * arguments, and outputs match NNAPI Runtime function {@link
1645    * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}, at the feature
1646    * level of this NnApiSLDriver struct.
1647    */
1648   bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled)(
1649       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1650 
1651   /**
1652    * SL Driver implementation of {@link
1653    * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}. Behavior,
1654    * arguments, and outputs match NNAPI Runtime function {@link
1655    * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}, at the
1656    * feature level of this NnApiSLDriver struct.
1657    */
1658   bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed)(
1659       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1660 
1661   /**
1662    * SL Driver implementation of {@link
1663    * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}. Behavior,
1664    * arguments, and outputs match NNAPI Runtime function {@link
1665    * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}, at the
1666    * feature level of this NnApiSLDriver struct.
1667    */
1668   bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed)(
1669       const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
1670 
1671   /**
1672    * SL Driver implementation of {@link
1673    * SL_ANeuralNetworksDiagnostic_registerCallbacks}. Behavior, arguments, and
1674    * outputs match NNAPI Runtime function {@link
1675    * SL_ANeuralNetworksDiagnostic_registerCallbacks}, at the feature level of
1676    * this NnApiSLDriver struct.
1677    */
1678   void (*SL_ANeuralNetworksDiagnostic_registerCallbacks)(
1679       ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback,
1680       ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback,
1681       void* callbackContext);
1682 
1683 } NnApiSLDriverImplFL5;
1684 // LINT.ThenChange()
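
/*
 * Non-normative usage sketch (not part of the ABI): given a pointer `sl` to a
 * populated NnApiSLDriverImplFL5, a client calls the members above much like
 * the corresponding NNAPI Runtime entry points. How `sl` is obtained from the
 * vendor-provided Support Library binary is outside the scope of this sketch;
 * ANEURALNETWORKS_NO_ERROR comes from NeuralNetworksTypes.h, and the
 * performance info kind below is the SL_ANEURALNETWORKS_CAPABILITIES_*
 * enumerator defined earlier in this header.
 *
 *   printf("SL runtime feature level: %lld\n",
 *          (long long)sl->ANeuralNetworks_getRuntimeFeatureLevel());
 *
 *   uint32_t deviceCount = 0;
 *   if (sl->ANeuralNetworks_getDeviceCount(&deviceCount) ==
 *       ANEURALNETWORKS_NO_ERROR) {
 *     for (uint32_t i = 0; i < deviceCount; ++i) {
 *       ANeuralNetworksDevice* device = NULL;
 *       if (sl->ANeuralNetworks_getDevice(i, &device) != ANEURALNETWORKS_NO_ERROR)
 *         continue;
 *       SL_ANeuralNetworksPerformanceInfo perf;
 *       if (sl->SL_ANeuralNetworksDevice_getPerformanceInfo(
 *               device, SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR,
 *               &perf) == ANEURALNETWORKS_NO_ERROR) {
 *         printf("device %u: relaxed scalar execTime %f, powerUsage %f\n",
 *                (unsigned)i, perf.execTime, perf.powerUsage);
 *       }
 *     }
 *   }
 */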
1685 
1686 /**
1687  * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link
1688  * ANEURALNETWORKS_FEATURE_LEVEL_6}.
1689  *
1690  * This struct must set its implFeatureLevel to {@link
1691  * ANEURALNETWORKS_FEATURE_LEVEL_6}.
1692  *
1693  */
1694 typedef struct NnApiSLDriverImplFL5 NnApiSLDriverImplFL6;
1695 
1696 /**
1697  * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link
1698  * ANEURALNETWORKS_FEATURE_LEVEL_7}.
1699  *
1700  * This struct must set its implFeatureLevel to {@link
1701  * ANEURALNETWORKS_FEATURE_LEVEL_7}.
1702  *
1703  */
1704 typedef NnApiSLDriverImplFL6 NnApiSLDriverImplFL7;
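
/*
 * Non-normative usage sketch (not part of the ABI): because FL6 and FL7 reuse
 * the FL5 layout, a client distinguishes them at run time only by the
 * implFeatureLevel the implementation reports. The `base` member name used
 * below is an assumption based on the base-struct description earlier in this
 * header; the ANEURALNETWORKS_FEATURE_LEVEL_* values come from
 * NeuralNetworksTypes.h.
 *
 *   const NnApiSLDriverImplFL5* sl = ...;  // provided by the SL vendor binary
 *   if (sl->base.implFeatureLevel >= ANEURALNETWORKS_FEATURE_LEVEL_7) {
 *     // The same pointer may be treated as an FL7 implementation.
 *     const NnApiSLDriverImplFL7* sl7 = (const NnApiSLDriverImplFL7*)sl;
 *     (void)sl7;
 *   }
 */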
1705 
1706 #ifdef __cplusplus
1707 }  // extern "C"
1708 #endif
1709 
1710