/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <android-base/logging.h>
#include <android-base/scopeguard.h>
// android/log.h contains __INTRODUCED_IN() macro and must be included before
// sharedmem.h
#include <android/log.h>
#include <android/sharedmem.h>
#include <gtest/gtest.h>
#include <sys/mman.h>

#include <algorithm>
#include <functional>
#include <future>
#include <limits>
#include <set>
#include <string>
#include <utility>
#include <vector>

#include "AndroidVersionUtil.h"
#include "NeuralNetworks.h"
#include "NeuralNetworksOEM.h"
#include "TmpDirectoryUtils.h"

#ifdef __ANDROID__
#include <android/hardware_buffer.h>
#else  // __ANDROID__
#include <android-base/file.h>
#endif  // __ANDROID__

#ifndef NNTEST_ONLY_PUBLIC_API
#include "NeuralNetworksExtensions.h"
#include "TypeManager.h"
#endif

// This file tests all the validations done by the Neural Networks API.

namespace {

constexpr uint64_t kShortWaitInNanoseconds = 1'000'000'000;  // 1 second

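// Base fixture shared by all validation tests. The derived fixtures below build on it to
// create models, compilations, executions, bursts, and memory descriptors.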
class ValidationTest : public ::testing::Test {
   protected:
    virtual void SetUp() {}
};

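// Creates an empty model in SetUp() and frees it in TearDown(). The helper methods add
// operands and operations to the model; each add*Operand() helper returns the index of the
// newly added operand.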
class ValidationTestModel : public ValidationTest {
   protected:
    virtual void SetUp() {
        ValidationTest::SetUp();
        ASSERT_EQ(ANeuralNetworksModel_create(&mModel), ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() {
        ANeuralNetworksModel_free(mModel);
        ValidationTest::TearDown();
    }

    uint32_t addScalarOperand(int32_t type = ANEURALNETWORKS_INT32) {
        ANeuralNetworksOperandType operandType = {
                .type = type, .dimensionCount = 0, .dimensions = nullptr};
        EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &operandType), ANEURALNETWORKS_NO_ERROR);
        return mNumOperands++;
    }

    uint32_t addOperand(const ANeuralNetworksOperandType& operandType) {
        EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &operandType), ANEURALNETWORKS_NO_ERROR);
        return mNumOperands++;
    }

    uint32_t addTensorOperand(int32_t type = ANEURALNETWORKS_TENSOR_FLOAT32) {
        return addTensorOperand(type, {2});
    }

    uint32_t addTensorOperand(int32_t type, const std::vector<uint32_t>& dimensions) {
        ANeuralNetworksOperandType operandType = {
                .type = type,
                .dimensionCount = static_cast<uint32_t>(dimensions.size()),
                .dimensions = dimensions.data(),
        };
        return addOperand(operandType);
    }

    int addOperation(ANeuralNetworksOperationType type, const std::vector<uint32_t>& inputs,
                     const std::vector<uint32_t>& outputs) {
        ++mNumOperations;
        return ANeuralNetworksModel_addOperation(mModel, type, inputs.size(), inputs.data(),
                                                 outputs.size(), outputs.data());
    }
    int identifyInputsAndOutputs(const std::vector<uint32_t>& inputs,
                                 const std::vector<uint32_t>& outputs) {
        return ANeuralNetworksModel_identifyInputsAndOutputs(mModel, inputs.size(), inputs.data(),
                                                             outputs.size(), outputs.data());
    }
    int modelFinish() { return ANeuralNetworksModel_finish(mModel); }

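    // Builds a minimal valid model, ADD(tensor, tensor, activation scalar) -> tensor,
    // identifies operands {0, 1, 2} as inputs and {3} as the output, and finishes the model.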
    virtual void createModel() {
        addTensorOperand();
        addTensorOperand();
        addScalarOperand();
        addTensorOperand();
        const std::vector<uint32_t> inList = {0, 1, 2};
        const std::vector<uint32_t> outList = {3};
        ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, inList, outList), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(identifyInputsAndOutputs(inList, outList), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
    }

    uint32_t mNumOperands = 0;
    uint32_t mNumOperations = 0;
    ANeuralNetworksModel* mModel = nullptr;

    const uint32_t kDummyDimensionValue = 1;
    const ANeuralNetworksOperandType kInvalidTensorType1{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32,
            // dimensionCount must be consistent with dimensions.
            .dimensionCount = 1,
            .dimensions = nullptr,
    };
    const ANeuralNetworksOperandType kInvalidTensorType2{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32,
            // dimensionCount must be consistent with dimensions.
            .dimensionCount = 0,
            .dimensions = &kDummyDimensionValue,
    };
};

#ifndef NNTEST_ONLY_PUBLIC_API
constexpr const char* kTestExtensionName = "com.android.test_extension";
constexpr int32_t kTestExtensionTensorType = ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL;

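// Registers kTestExtensionName with the internal TypeManager so that extension operand types
// can be created through the public API; the registration is undone in TearDown().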
class ValidationTestModelExtensions : public ValidationTestModel {
   protected:
    virtual void SetUp() {
        ValidationTestModel::SetUp();
        EXPECT_TRUE(::android::nn::TypeManager::get()->forTest_registerExtension({
                .name = kTestExtensionName,
                .operandTypes =
                        {
                                {
                                        .type = kTestExtensionTensorType,
                                        .isTensor = true,
                                        .byteSize = 1,
                                },
                        },
        }));
    }

    virtual void TearDown() {
        ::android::nn::TypeManager::get()->forTest_reset();
        ValidationTestModel::TearDown();
    }

    int32_t getExtensionOperandType(uint16_t typeWithinExtension) {
        int32_t result;
        EXPECT_EQ(ANeuralNetworksModel_getExtensionOperandType(mModel, kTestExtensionName,
                                                               typeWithinExtension, &result),
                  ANEURALNETWORKS_NO_ERROR);
        return result;
    }
};
#endif

class ValidationTestIdentify : public ValidationTestModel {
    virtual void SetUp() {
        ValidationTestModel::SetUp();

        uint32_t dimensions[]{1};
        ANeuralNetworksOperandType tensorType{.type = ANEURALNETWORKS_TENSOR_FLOAT32,
                                              .dimensionCount = 1,
                                              .dimensions = dimensions};
        ANeuralNetworksOperandType scalarType{
                .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};
        ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, {0, 1, 2}, {3}), ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() { ValidationTestModel::TearDown(); }
};

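// Builds and finishes the simple ADD model, then creates a compilation from it. The
// compilation is intentionally left unfinished so that individual tests (and derived
// fixtures) control when ANeuralNetworksCompilation_finish is called.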
class ValidationTestCompilation : public ValidationTestModel {
   protected:
    virtual void SetUp() {
        ValidationTestModel::SetUp();
        createModel();
        ASSERT_EQ(ANeuralNetworksCompilation_create(mModel, &mCompilation),
                  ANEURALNETWORKS_NO_ERROR);
    }

    virtual void TearDown() {
        ANeuralNetworksCompilation_free(mCompilation);
        ValidationTestModel::TearDown();
    }

    ANeuralNetworksCompilation* mCompilation = nullptr;
};

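// Finishes the compilation and creates an execution from it, so tests can exercise
// execution-level argument validation directly.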
class ValidationTestExecution : public ValidationTestCompilation {
   protected:
    virtual void SetUp() {
        ValidationTestCompilation::SetUp();

        ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);

        ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &mExecution),
                  ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() {
        ANeuralNetworksExecution_free(mExecution);
        ValidationTestCompilation::TearDown();
    }
    ANeuralNetworksExecution* mExecution = nullptr;
};

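// Additionally creates an ANeuralNetworksBurst from the finished compilation.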
class ValidationTestBurst : public ValidationTestExecution {
   protected:
    virtual void SetUp() {
        ValidationTestExecution::SetUp();

        ASSERT_EQ(ANeuralNetworksBurst_create(mCompilation, &mBurst), ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() {
        ANeuralNetworksBurst_free(mBurst);
        ValidationTestExecution::TearDown();
    }
    ANeuralNetworksBurst* mBurst = nullptr;
};

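// Provides an ANeuralNetworksMemoryDesc plus a helper that creates ANeuralNetworksMemory
// objects backed by shared memory (ashmem on Android, a truncated temporary file elsewhere).
// All created memories and file descriptors are released in TearDown().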
class ValidationTestMemoryDesc : public ValidationTestCompilation {
   protected:
    virtual void SetUp() {
        ValidationTestCompilation::SetUp();
        ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&mDesc), ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() {
        ANeuralNetworksMemoryDesc_free(mDesc);
        for (auto* memory : mMemories) ANeuralNetworksMemory_free(memory);
        for (int fd : mFds) close(fd);
        ValidationTestCompilation::TearDown();
    }

    ANeuralNetworksMemory* createAshmem(uint32_t size) {
#ifdef __ANDROID__
        int fd = ASharedMemory_create("nnMemory", size);
#else  // __ANDROID__
        TemporaryFile tmpFile;
        int fd = tmpFile.release();
        CHECK_EQ(ftruncate(fd, size), 0);
#endif  // __ANDROID__
        EXPECT_GT(fd, 0);
        mFds.push_back(fd);
        ANeuralNetworksMemory* ashmem = nullptr;
        EXPECT_EQ(ANeuralNetworksMemory_createFromFd(size, PROT_READ | PROT_WRITE, fd, 0, &ashmem),
                  ANEURALNETWORKS_NO_ERROR);
        mMemories.push_back(ashmem);
        return ashmem;
    }

    ANeuralNetworksMemoryDesc* mDesc = nullptr;
    std::vector<ANeuralNetworksMemory*> mMemories;
    std::vector<int> mFds;
};

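// Builds four model/compilation pairs used by the device-memory tests: a fully specified
// model, a model with unspecified input/output dimensions, and "init"/"deinit" models, the
// latter constructed with an out-of-range activation value (see createModel below).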
class ValidationTestExecutionDeviceMemory : public ValidationTest {
   protected:
    virtual void SetUp() {
        ValidationTest::SetUp();
        ASSERT_EQ(ANeuralNetworksModel_create(&mModel), ANEURALNETWORKS_NO_ERROR);
        createModel(mModel, /*dimensionsUnspecified=*/false, /*isValid=*/true);
        ASSERT_EQ(ANeuralNetworksCompilation_create(mModel, &mCompilation),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &mExecution),
                  ANEURALNETWORKS_NO_ERROR);

        ASSERT_EQ(ANeuralNetworksModel_create(&mModelDynamic), ANEURALNETWORKS_NO_ERROR);
        createModel(mModelDynamic, /*dimensionsUnspecified=*/true, /*isValid=*/true);
        ASSERT_EQ(ANeuralNetworksCompilation_create(mModelDynamic, &mCompilationDynamic),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilationDynamic), ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_create(mCompilationDynamic, &mExecutionDynamic),
                  ANEURALNETWORKS_NO_ERROR);

        ASSERT_EQ(ANeuralNetworksModel_create(&mInitModel), ANEURALNETWORKS_NO_ERROR);
        createModel(mInitModel, /*dimensionsUnspecified=*/false, /*isValid=*/true);
        ASSERT_EQ(ANeuralNetworksCompilation_create(mInitModel, &mInitCompilation),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksCompilation_finish(mInitCompilation), ANEURALNETWORKS_NO_ERROR);

        ASSERT_EQ(ANeuralNetworksModel_create(&mDeinitModel), ANEURALNETWORKS_NO_ERROR);
        createModel(mDeinitModel, /*dimensionsUnspecified=*/false, /*isValid=*/false);
        ASSERT_EQ(ANeuralNetworksCompilation_create(mDeinitModel, &mDeinitCompilation),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksCompilation_finish(mDeinitCompilation), ANEURALNETWORKS_NO_ERROR);
    }
    virtual void TearDown() {
        ANeuralNetworksExecution_free(mExecution);
        ANeuralNetworksCompilation_free(mCompilation);
        ANeuralNetworksModel_free(mModel);
        ANeuralNetworksExecution_free(mExecutionDynamic);
        ANeuralNetworksCompilation_free(mCompilationDynamic);
        ANeuralNetworksModel_free(mModelDynamic);

        ANeuralNetworksCompilation_free(mInitCompilation);
        ANeuralNetworksModel_free(mInitModel);
        ANeuralNetworksCompilation_free(mDeinitCompilation);
        ANeuralNetworksModel_free(mDeinitModel);

        ValidationTest::TearDown();
    }

    void addScalarOperand(ANeuralNetworksModel* model) {
        ANeuralNetworksOperandType operandType = {
                .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};
        EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &operandType), ANEURALNETWORKS_NO_ERROR);
    }

    void addTensorOperand(ANeuralNetworksModel* model, bool dimensionsUnspecified) {
        uint32_t dimension = dimensionsUnspecified ? 0 : 1;
        ANeuralNetworksOperandType operandType = {
                .type = ANEURALNETWORKS_TENSOR_FLOAT32,
                .dimensionCount = 1,
                .dimensions = &dimension,
        };
        EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &operandType), ANEURALNETWORKS_NO_ERROR);
    }

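    // Builds ADD(model input, constant tensor, constant activation scalar) -> model output.
    // When isValid is false, the activation operand is given the out-of-range value 999.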
    void createModel(ANeuralNetworksModel* model, bool dimensionsUnspecified, bool isValid) {
        const float constData = 0;
        const uint32_t actData = isValid ? 0 : 999;

        addTensorOperand(model, dimensionsUnspecified);
        addTensorOperand(model, /*dimensionsUnspecified=*/false);
        addScalarOperand(model);
        addTensorOperand(model, dimensionsUnspecified);

        ASSERT_EQ(ANeuralNetworksModel_setOperandValue(model, 1, &constData, sizeof(float)),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksModel_setOperandValue(model, 2, &actData, sizeof(uint32_t)),
                  ANEURALNETWORKS_NO_ERROR);

        uint32_t inList[] = {0, 1, 2}, outList[] = {3};
        ASSERT_EQ(ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, inList, 1,
                                                    outList),
                  ANEURALNETWORKS_NO_ERROR);
        uint32_t inputList[] = {0}, outputList[] = {3};
        ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(model, 1, inputList, 1, outputList),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksModel_finish(model), ANEURALNETWORKS_NO_ERROR);
    }

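    // Creates a one-shot execution on `compilation` with `memory` bound to the model input
    // (or to the model output, in the variant below) and checks that compute() returns
    // expectedResult.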
    void executeWithMemoryAsInput(ANeuralNetworksCompilation* compilation,
                                  ANeuralNetworksMemory* memory, int expectedResult) {
        float data = 0;
        ANeuralNetworksExecution* execution = nullptr;
        ASSERT_EQ(ANeuralNetworksExecution_create(compilation, &execution),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_setInputFromMemory(execution, 0, nullptr, memory, 0, 0),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &data, sizeof(float)),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_compute(execution), expectedResult);
        ANeuralNetworksExecution_free(execution);
    }

    void executeWithMemoryAsOutput(ANeuralNetworksCompilation* compilation,
                                   ANeuralNetworksMemory* memory, int expectedResult) {
        const float data = 0;
        ANeuralNetworksExecution* execution = nullptr;
        ASSERT_EQ(ANeuralNetworksExecution_create(compilation, &execution),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &data, sizeof(float)),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0, 0),
                  ANEURALNETWORKS_NO_ERROR);
        ASSERT_EQ(ANeuralNetworksExecution_compute(execution), expectedResult);
        ANeuralNetworksExecution_free(execution);
    }

    ANeuralNetworksModel* mModel = nullptr;
    ANeuralNetworksCompilation* mCompilation = nullptr;
    ANeuralNetworksExecution* mExecution = nullptr;

    ANeuralNetworksModel* mModelDynamic = nullptr;
    ANeuralNetworksCompilation* mCompilationDynamic = nullptr;
    ANeuralNetworksExecution* mExecutionDynamic = nullptr;

    ANeuralNetworksModel* mInitModel = nullptr;
    ANeuralNetworksCompilation* mInitCompilation = nullptr;
    ANeuralNetworksModel* mDeinitModel = nullptr;
    ANeuralNetworksCompilation* mDeinitCompilation = nullptr;
};

TEST_F(ValidationTest, CreateModel) {
    EXPECT_EQ(ANeuralNetworksModel_create(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
}

TEST_F(ValidationTestModel, AddOperand) {
    ANeuralNetworksOperandType floatType{
            .type = ANEURALNETWORKS_FLOAT32, .dimensionCount = 0, .dimensions = nullptr};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(nullptr, &floatType),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);

    ANeuralNetworksOperandType quant8TypeInvalidScale{
            .type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
            .dimensionCount = 0,
            .dimensions = nullptr,
            // Scale has to be non-negative
            .scale = -1.0f,
            .zeroPoint = 0,
    };
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &quant8TypeInvalidScale),
              ANEURALNETWORKS_BAD_DATA);

    ANeuralNetworksOperandType quant8TypeInvalidZeroPoint{
            .type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
            .dimensionCount = 0,
            .dimensions = nullptr,
            .scale = 1.0f,
            // zeroPoint has to be in [0, 255]
            .zeroPoint = -1,
    };
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &quant8TypeInvalidZeroPoint),
              ANEURALNETWORKS_BAD_DATA);

    const uint32_t dim = 2;
    ANeuralNetworksOperandType invalidScalarType{
            .type = ANEURALNETWORKS_INT32,
            // a scalar type must have 0 dimensions.
            .dimensionCount = 1,
            .dimensions = &dim,
    };
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &invalidScalarType),
              ANEURALNETWORKS_BAD_DATA);

    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &kInvalidTensorType1),
              ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &kInvalidTensorType2),
              ANEURALNETWORKS_BAD_DATA);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &floatType), ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, SetOperandSymmPerChannelQuantParams) {
    const int32_t operandIndex = addTensorOperand(ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL);

    float scales[2] = {1.0, 2.0};
    ANeuralNetworksSymmPerChannelQuantParams channelQuant = {
            .channelDim = 0,
            .scaleCount = 2,
            .scales = scales,
    };

    EXPECT_EQ(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(nullptr, operandIndex,
                                                                       &channelQuant),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(
            ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(mModel, operandIndex, nullptr),
            ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(mModel, operandIndex + 1,
                                                                       &channelQuant),
              ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(mModel, operandIndex,
                                                                       &channelQuant),
              ANEURALNETWORKS_NO_ERROR);
}

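// The extension-specific tests below depend on the TypeManager registration done by
// ValidationTestModelExtensions and are compiled out when only the public API is available.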
#ifndef NNTEST_ONLY_PUBLIC_API
TEST_F(ValidationTestModelExtensions, AddOperand_UnknownPrefix) {
    ANeuralNetworksOperandType type = {.type = -1};
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &type), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModelExtensions, SetOperandSymmPerChannelQuantParams_ExtensionOperand) {
    const int32_t operandIndex =
            addTensorOperand(getExtensionOperandType(kTestExtensionTensorType));

    float scales[2] = {1.0, 2.0};
    ANeuralNetworksSymmPerChannelQuantParams channelQuant = {
            .channelDim = 0,
            .scaleCount = 2,
            .scales = scales,
    };

    EXPECT_EQ(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(mModel, operandIndex,
                                                                       &channelQuant),
              ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModelExtensions, SetOperandExtensionData) {
    const int32_t operandIndex =
            addTensorOperand(getExtensionOperandType(kTestExtensionTensorType));
    const int32_t data = 42;
    const size_t dataLength = sizeof(data);
    EXPECT_EQ(
            ANeuralNetworksModel_setOperandExtensionData(nullptr, operandIndex, &data, dataLength),
            ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(
            ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex, nullptr, dataLength),
            ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex, &data, 0),
              ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex + 1, &data,
                                                           dataLength),
              ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex, &data, dataLength),
              ANEURALNETWORKS_NO_ERROR);
}

TEST_F(ValidationTestModelExtensions, SetOperandExtensionData_Empty) {
    const int32_t operandIndex =
            addTensorOperand(getExtensionOperandType(kTestExtensionTensorType));
    EXPECT_EQ(ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex, nullptr, 0),
              ANEURALNETWORKS_NO_ERROR);
}

TEST_F(ValidationTestModelExtensions, SetOperandExtensionData_NonExtensionOperand) {
    const int32_t operandIndex = addTensorOperand();
    const int32_t data = 42;
    const size_t dataLength = sizeof(data);
    EXPECT_EQ(ANeuralNetworksModel_setOperandExtensionData(mModel, operandIndex, &data, dataLength),
              ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModelExtensions, SetOperandValue_UnspecifiedDimension) {
    const uint32_t dimensions[2] = {3, 0};
    ANeuralNetworksOperandType type = {
            .type = getExtensionOperandType(kTestExtensionTensorType),
            .dimensionCount = 2,
            .dimensions = dimensions,
    };
    const int32_t operandIndex = addOperand(type);
    char buffer[20];
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, operandIndex, buffer, sizeof(buffer)),
              ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModelExtensions, SetOperandValue_UnspecifiedRank) {
    ANeuralNetworksOperandType type = {
            .type = getExtensionOperandType(kTestExtensionTensorType),
            .dimensionCount = 0,
            .dimensions = nullptr,
    };
    const int32_t operandIndex = addOperand(type);
    char buffer[20];
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, operandIndex, buffer, sizeof(buffer)),
              ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModelExtensions, AddOperandDimensionProductOverflow) {
    uint32_t dimensions[] = {5, 4, 4, 786433, 5, 3, 16777216, 4, 5};
    ANeuralNetworksOperandType operandType = {
            .type = getExtensionOperandType(kTestExtensionTensorType),
            .dimensionCount = std::size(dimensions),
            .dimensions = dimensions,
    };
    // This should fail, as the operand type's dimension product overflows uint32_t.
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &operandType), ANEURALNETWORKS_BAD_DATA);
}
#endif

TEST_F(ValidationTestModel, SetOptionalOperand) {
    ANeuralNetworksOperandType floatType{
            .type = ANEURALNETWORKS_FLOAT32, .dimensionCount = 0, .dimensions = nullptr};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &floatType), ANEURALNETWORKS_NO_ERROR);

    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, nullptr, 0),
              ANEURALNETWORKS_NO_ERROR);
}

TEST_F(ValidationTestModel, SetOperandValue) {
    ANeuralNetworksOperandType floatType{
            .type = ANEURALNETWORKS_FLOAT32, .dimensionCount = 0, .dimensions = nullptr};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &floatType), ANEURALNETWORKS_NO_ERROR);

    char buffer[20];
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(nullptr, 0, buffer, sizeof(buffer)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, nullptr, sizeof(buffer)),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // This should fail, because buffer is not the size of a float32.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, buffer, sizeof(buffer)),
              ANEURALNETWORKS_BAD_DATA);

    // This should succeed.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, buffer, sizeof(float)),
              ANEURALNETWORKS_NO_ERROR);

    // This should fail, as this operand does not exist.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 1, buffer, sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, buffer, sizeof(float)),
              ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, SetOperandValueFromMemory) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType floatType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &floatType), ANEURALNETWORKS_NO_ERROR);

    const size_t memorySize = 20;
#ifdef __ANDROID__
    int memoryFd = ASharedMemory_create("nnMemory", memorySize);
#else  // __ANDROID__
    TemporaryFile tmpFile;
    int memoryFd = tmpFile.release();
    CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
#endif  // __ANDROID__
    ASSERT_GT(memoryFd, 0);

    ANeuralNetworksMemory* memory;
    EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
                                                 &memory),
              ANEURALNETWORKS_NO_ERROR);

    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(nullptr, 0, memory, 0, sizeof(float)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, nullptr, 0, sizeof(float)),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // This should fail, because the operand does not exist.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, -1, memory, 0, sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, because memory is not the size of a float32.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, 0, memorySize),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, as this operand does not exist.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 1, memory, 0, sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, because offset is larger than memorySize.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, memorySize + 1,
                                                             sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, because requested size is larger than the memory.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, memorySize - 3,
                                                             sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, 0, sizeof(float)),
              ANEURALNETWORKS_BAD_STATE);

    // close memory
    ANeuralNetworksMemory_free(memory);
    close(memoryFd);
}

#ifdef __ANDROID__
TEST_F(ValidationTestModel, SetOperandValueFromAHardwareBuffer) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType quant8Type{.type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
                                          .dimensionCount = 1,
                                          .dimensions = dimensions,
                                          .scale = 1.0,
                                          .zeroPoint = 0};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &quant8Type), ANEURALNETWORKS_NO_ERROR);

    AHardwareBuffer_Desc desc{
            .width = 16,
            .height = 16,
            .layers = 1,
            .format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM,
            .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
    };

    AHardwareBuffer* buffer = nullptr;
    ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);

    ANeuralNetworksMemory* memory;
    EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
              ANEURALNETWORKS_NO_ERROR);

    // This should fail, because non-BLOB AHardwareBuffer is not allowed.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, 0, sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);

    // close memory
    ANeuralNetworksMemory_free(memory);
    AHardwareBuffer_release(buffer);
}

TEST_F(ValidationTestModel, SetOperandValueFromAHardwareBufferBlob) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType floatType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &floatType), ANEURALNETWORKS_NO_ERROR);

    const size_t memorySize = 20;
    AHardwareBuffer_Desc desc{
            .width = memorySize,
            .height = 1,
            .layers = 1,
            .format = AHARDWAREBUFFER_FORMAT_BLOB,
            .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
    };

    AHardwareBuffer* buffer = nullptr;
    ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);

    ANeuralNetworksMemory* memory;
    EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
              ANEURALNETWORKS_NO_ERROR);

    // This should fail, because offset is larger than memorySize.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, memorySize + 1,
                                                             sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, because requested size is larger than the memory.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromMemory(mModel, 0, memory, memorySize - 3,
                                                             sizeof(float)),
              ANEURALNETWORKS_BAD_DATA);

    // close memory
    ANeuralNetworksMemory_free(memory);
    AHardwareBuffer_release(buffer);
}
#endif  // __ANDROID__

TEST_F(ValidationTestModel, SetOperandValueFromModel) {
    uint32_t dimensions[] = {2};
    ANeuralNetworksOperandType tensorType = {
            .type = ANEURALNETWORKS_TENSOR_FLOAT32,
            .dimensionCount = std::size(dimensions),
            .dimensions = dimensions,
    };
    ANeuralNetworksOperandType scalarType = {.type = ANEURALNETWORKS_INT32};
    ANeuralNetworksOperandType modelType = {.type = ANEURALNETWORKS_MODEL};

    ANeuralNetworksModel* valueModel = nullptr;
    ASSERT_EQ(ANeuralNetworksModel_create(&valueModel), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(valueModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(valueModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(valueModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(valueModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    uint32_t inList[3] = {0, 1, 2};
    uint32_t outList[1] = {3};
    ASSERT_EQ(ANeuralNetworksModel_addOperation(valueModel, ANEURALNETWORKS_ADD, 3, inList, 1,
                                                outList),
              ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(valueModel, 3, inList, 1, outList),
              ANEURALNETWORKS_NO_ERROR);

    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &modelType), ANEURALNETWORKS_NO_ERROR);

    // This should fail, as the value model is not finished.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(mModel, 0, valueModel),
              ANEURALNETWORKS_BAD_STATE);
    ANeuralNetworksModel_finish(valueModel);

    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(nullptr, 0, valueModel),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(mModel, 0, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // This should fail, because the operand does not exist.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(mModel, -1, valueModel),
              ANEURALNETWORKS_BAD_DATA);

    // This should fail, as this operand does not exist.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(mModel, 1, valueModel),
              ANEURALNETWORKS_BAD_DATA);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_setOperandValueFromModel(mModel, 0, valueModel),
              ANEURALNETWORKS_BAD_STATE);

    ANeuralNetworksModel_free(valueModel);
}

TEST_F(ValidationTestModel, AddOEMOperand) {
    ANeuralNetworksOperandType OEMScalarType{
            .type = ANEURALNETWORKS_OEM_SCALAR, .dimensionCount = 0, .dimensions = nullptr};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &OEMScalarType), ANEURALNETWORKS_NO_ERROR);
    char buffer[20];
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 0, buffer, sizeof(buffer)),
              ANEURALNETWORKS_NO_ERROR);

    const size_t kByteSizeOfOEMTensor = 4;
    uint32_t dimensions[]{kByteSizeOfOEMTensor};
    ANeuralNetworksOperandType OEMTensorType{
            .type = ANEURALNETWORKS_TENSOR_OEM_BYTE, .dimensionCount = 1, .dimensions = dimensions};
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &OEMTensorType), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 1, buffer, kByteSizeOfOEMTensor),
              ANEURALNETWORKS_NO_ERROR);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &OEMTensorType), ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, AddOperation) {
    uint32_t input = 0;
    uint32_t output = 0;
    EXPECT_EQ(ANeuralNetworksModel_addOperation(nullptr, ANEURALNETWORKS_AVERAGE_POOL_2D, 1, &input,
                                                1, &output),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_addOperation(mModel, ANEURALNETWORKS_AVERAGE_POOL_2D, 0, nullptr,
                                                1, &output),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_addOperation(mModel, ANEURALNETWORKS_AVERAGE_POOL_2D, 1, &input,
                                                0, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    ANeuralNetworksOperationType invalidOp = -1;
    EXPECT_EQ(addOperation(invalidOp, {input}, {output}), ANEURALNETWORKS_BAD_DATA);

    modelFinish();
    // This should fail, as the model is already finished.
    EXPECT_EQ(addOperation(ANEURALNETWORKS_AVERAGE_POOL_2D, {input}, {output}),
              ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, IdentifyInputsAndOutputs) {
    uint32_t input = 0;
    uint32_t output = 0;
    EXPECT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(nullptr, 1, &input, 1, &output),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 0, nullptr, 1, &output),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(mModel, 1, &input, 0, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    createModel();
    // This should fail, as the model is already finished.
    EXPECT_EQ(identifyInputsAndOutputs({input}, {output}), ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, RelaxComputationFloat32toFloat16) {
    EXPECT_EQ(ANeuralNetworksModel_relaxComputationFloat32toFloat16(nullptr, true),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    createModel();
    // This should fail, as the model is already finished.
    EXPECT_EQ(ANeuralNetworksModel_relaxComputationFloat32toFloat16(mModel, true),
              ANEURALNETWORKS_BAD_STATE);
    EXPECT_EQ(ANeuralNetworksModel_relaxComputationFloat32toFloat16(mModel, false),
              ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, Finish) {
    EXPECT_EQ(ANeuralNetworksModel_finish(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
    createModel();
    EXPECT_EQ(modelFinish(), ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, EmptyModel) {
    // An empty model is invalid
    EXPECT_EQ(modelFinish(), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModel, CreateCompilation) {
    ANeuralNetworksCompilation* compilation = nullptr;
    EXPECT_EQ(ANeuralNetworksCompilation_create(nullptr, &compilation),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_create(mModel, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_create(mModel, &compilation), ANEURALNETWORKS_BAD_STATE);
}

TEST_F(ValidationTestModel, CreateCompilationForDevices) {
    createModel();
    uint32_t numDevices = 0;
    EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);

    if (numDevices > 0) {
        ANeuralNetworksDevice* device;
        EXPECT_EQ(ANeuralNetworks_getDevice(0, &device), ANEURALNETWORKS_NO_ERROR);
        ANeuralNetworksCompilation* compilation = nullptr;
        EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(nullptr, &device, 1, &compilation),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, &device, 1, nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);

        // empty device list
        EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, &device, 0, &compilation),
                  ANEURALNETWORKS_BAD_DATA);

        // duplicate devices in the list.
        ANeuralNetworksDevice* invalidDevices[2] = {device, device};
        EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, invalidDevices, 2,
                                                              &compilation),
                  ANEURALNETWORKS_BAD_DATA);
        // nullptr in the list.
        invalidDevices[1] = nullptr;
        EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, invalidDevices, 2,
                                                              &compilation),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
    }

    ANeuralNetworksCompilation* compilation = nullptr;
    EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(nullptr, nullptr, 1, &compilation),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, nullptr, 1, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, nullptr, 1, &compilation),
              ANEURALNETWORKS_UNEXPECTED_NULL);
}

TEST_F(ValidationTestModel, GetSupportedOperationsForDevices) {
    createModel();
    uint32_t numDevices = 0;
    EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);

    bool supportedOps[20];
    ASSERT_LE(mNumOperations, sizeof(supportedOps) / sizeof(supportedOps[0]));
    if (numDevices > 0) {
        ANeuralNetworksDevice* device;
        EXPECT_EQ(ANeuralNetworks_getDevice(0, &device), ANEURALNETWORKS_NO_ERROR);
        EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(nullptr, &device, 1,
                                                                        supportedOps),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(
                ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, &device, 1, nullptr),
                ANEURALNETWORKS_UNEXPECTED_NULL);

        // empty device list
        EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, &device, 0,
                                                                        supportedOps),
                  ANEURALNETWORKS_BAD_DATA);

        // duplicate devices in the list.
        ANeuralNetworksDevice* invalidDevices[2] = {device, device};
        EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, invalidDevices, 2,
                                                                        supportedOps),
                  ANEURALNETWORKS_BAD_DATA);
        // nullptr in the list.
        invalidDevices[1] = nullptr;
        EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, invalidDevices, 2,
                                                                        supportedOps),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
    }

    EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(nullptr, nullptr, 1,
                                                                    supportedOps),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, nullptr, 1, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(
            ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, nullptr, 1, supportedOps),
            ANEURALNETWORKS_UNEXPECTED_NULL);
}

TEST_F(ValidationTestModel, Cycle) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType tensorType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};
    ANeuralNetworksOperandType scalarType{
            .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};

    // opnd0 = model input TENSOR_FLOAT32
    // opnd1 = model input TENSOR_FLOAT32
    // opnd2 = model input INT32
    // opnd3 = ADD(opnd0, opnd4, opnd2)
    // opnd4 = ADD(opnd1, opnd3, opnd2)
    // opnd5 = ADD(opnd4, opnd0, opnd2)  // model output
    //
    //                +-----+
    //                |     |
    //                v     |
    //     3 = ADD(0, 4, 2) |
    //     |                |
    //     +----------+     |
    //                |     |
    //                v     |
    //     4 = ADD(1, 3, 2) |
    //     |                |
    //     +----------------+
    //     |
    //     |
    //     +-------+
    //             |
    //             v
    //     5 = ADD(4, 0, 2)

    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, {0, 4, 2}, {3}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, {1, 3, 2}, {4}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, {4, 0, 2}, {5}), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2}, {5}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModel, AcyclicReadBeforeWrite) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType tensorType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};

    // opnd0 = TENSOR_FLOAT32  // model input
    // opnd1 = LOGISTIC(opnd2) // model output
    // opnd2 = LOGISTIC(opnd0)
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(addOperation(ANEURALNETWORKS_LOGISTIC, {2}, {1}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(addOperation(ANEURALNETWORKS_LOGISTIC, {0}, {2}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(identifyInputsAndOutputs({0}, {1}), ANEURALNETWORKS_NO_ERROR);

    // This should succeed, because NN API doesn't require that operations be sorted.
    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
}

TEST_F(ValidationTestModel, MissingWrite) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType tensorType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};

    // opnd0 = TENSOR_FLOAT32  // model input
    // opnd1 = TENSOR_FLOAT32  // never written
    // opnd2 = LOGISTIC(opnd1) // model output
    // opnd3 = LOGISTIC(opnd0) // model output
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(addOperation(ANEURALNETWORKS_LOGISTIC, {1}, {2}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(addOperation(ANEURALNETWORKS_LOGISTIC, {0}, {3}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(identifyInputsAndOutputs({0}, {2, 3}), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModel, UnwrittenOperand) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType tensorType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};

    // opnd0 = TENSOR_FLOAT32  // model input
    // opnd1 = TENSOR_FLOAT32  // never written
    // opnd2 = LOGISTIC(opnd0) // model output
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(addOperation(ANEURALNETWORKS_LOGISTIC, {0}, {2}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(identifyInputsAndOutputs({0}, {2}), ANEURALNETWORKS_NO_ERROR);

    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestModel, MultipleWrite) {
    uint32_t dimensions[]{1};
    ANeuralNetworksOperandType tensorType{
            .type = ANEURALNETWORKS_TENSOR_FLOAT32, .dimensionCount = 1, .dimensions = dimensions};
    ANeuralNetworksOperandType scalarType{
            .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};

    // opnd0 = TENSOR_FLOAT32            // model input
    // opnd1 = INT32                     // model input
    // opnd2 = ADD(opnd0, opnd0, opnd1)  // model output; do this twice
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &scalarType), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &tensorType), ANEURALNETWORKS_NO_ERROR);

    for (int i = 0; i < 2; ++i) {
        SCOPED_TRACE(i);
        ASSERT_EQ(addOperation(ANEURALNETWORKS_ADD, {0, 0, 1}, {2}), ANEURALNETWORKS_NO_ERROR);
    }

    ASSERT_EQ(identifyInputsAndOutputs({0, 1}, {2}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestIdentify, Ok) {
    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2}, {3}), ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
}

TEST_F(ValidationTestIdentify, InputIsOutput) {
    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2}, {3, 0}), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestIdentify, OutputIsInput) {
    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2, 3}, {3}), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestIdentify, DuplicateInputs) {
    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2, 0}, {3}), ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestIdentify, DuplicateOutputs) {
    ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2}, {3, 3}), ANEURALNETWORKS_BAD_DATA);
}

// Also see TEST_F(ValidationTestCompilationForDevices_1, SetPreference)
TEST_F(ValidationTestCompilation, SetPreference) {
    EXPECT_EQ(ANeuralNetworksCompilation_setPreference(nullptr, ANEURALNETWORKS_PREFER_LOW_POWER),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    EXPECT_EQ(ANeuralNetworksCompilation_setPreference(mCompilation, 40), ANEURALNETWORKS_BAD_DATA);
}

// Also see TEST_F(ValidationTestCompilationForDevices_1, SetCaching)
TEST_F(ValidationTestCompilation, SetCaching) {
    std::vector<uint8_t> token(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN, 0);
    EXPECT_EQ(ANeuralNetworksCompilation_setCaching(nullptr, NN_TMP_DIR, token.data()),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, nullptr, token.data()),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, NN_TMP_DIR, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);
}

TEST_F(ValidationTestCompilation, SetPriority) {
    EXPECT_EQ(ANeuralNetworksCompilation_setPriority(nullptr, ANEURALNETWORKS_PRIORITY_DEFAULT),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // Test invalid values of priority.
    constexpr int kInvalidPriorities[] = {0,
                                          ANEURALNETWORKS_PRIORITY_LOW - 1,
                                          ANEURALNETWORKS_PRIORITY_LOW + 1,
                                          ANEURALNETWORKS_PRIORITY_MEDIUM - 1,
                                          ANEURALNETWORKS_PRIORITY_MEDIUM + 1,
                                          ANEURALNETWORKS_PRIORITY_HIGH - 1,
                                          ANEURALNETWORKS_PRIORITY_HIGH + 1};
    for (int invalidPriority : kInvalidPriorities) {
        EXPECT_EQ(ANeuralNetworksCompilation_setPriority(mCompilation, invalidPriority),
                  ANEURALNETWORKS_BAD_DATA);
    }
}

// Also see TEST_F(ValidationTestCompilationForDevices_1, SetTimeout)
// Also see TEST_F(ValidationTestCompilationForDevices_2, SetTimeout)
TEST_F(ValidationTestCompilation, SetTimeout) {
    EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(nullptr, kShortWaitInNanoseconds),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    // Timeout can only be set on Compilations created from CompilationForDevices with one device
    // specified.
    EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, kShortWaitInNanoseconds),
              ANEURALNETWORKS_BAD_DATA);
}

TEST_F(ValidationTestCompilation, GetPreferredMemoryAlignmentAndPadding) {
    if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
        uint32_t result;

        // The following calls should fail, because the compilation has not been finished.
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(mCompilation, 0,
                                                                                 &result),
                  ANEURALNETWORKS_BAD_STATE);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(mCompilation, 0,
                                                                               &result),
                  ANEURALNETWORKS_BAD_STATE);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(mCompilation, 0,
                                                                                  &result),
                  ANEURALNETWORKS_BAD_STATE);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(mCompilation, 0,
                                                                                &result),
                  ANEURALNETWORKS_BAD_STATE);

        EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);

        // The following calls should fail because of unexpected nullptr.
        EXPECT_EQ(
                ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(nullptr, 0, &result),
                ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(mCompilation, 0,
                                                                                 nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(nullptr, 0, &result),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(mCompilation, 0,
                                                                               nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(nullptr, 0,
                                                                                  &result),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(mCompilation, 0,
                                                                                  nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(
                ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(nullptr, 0, &result),
                ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(mCompilation, 0,
                                                                                nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);

        // The following calls should fail, because the index is out of range.
        const uint32_t invalidIndex = 1000;
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
                          mCompilation, invalidIndex, &result),
                  ANEURALNETWORKS_BAD_DATA);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(
                          mCompilation, invalidIndex, &result),
                  ANEURALNETWORKS_BAD_DATA);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(
                          mCompilation, invalidIndex, &result),
                  ANEURALNETWORKS_BAD_DATA);
        EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(
                          mCompilation, invalidIndex, &result),
                  ANEURALNETWORKS_BAD_DATA);

    } else {
        GTEST_SKIP();
    }
}

1236 // Also see TEST_F(ValidationTestCompilationForDevices_1, CreateExecution)
TEST_F(ValidationTestCompilation,CreateExecution)1237 TEST_F(ValidationTestCompilation, CreateExecution) {
1238 ANeuralNetworksExecution* execution = nullptr;
1239 EXPECT_EQ(ANeuralNetworksExecution_create(nullptr, &execution),
1240 ANEURALNETWORKS_UNEXPECTED_NULL);
1241 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, nullptr),
1242 ANEURALNETWORKS_UNEXPECTED_NULL);
1243 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_BAD_STATE);
1244 }
1245
1246 // Also see TEST_F(ValidationTestCompilationForDevices_1, Finish)
TEST_F(ValidationTestCompilation,Finish)1247 TEST_F(ValidationTestCompilation, Finish) {
1248 EXPECT_EQ(ANeuralNetworksCompilation_finish(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
1249 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
1250 EXPECT_EQ(ANeuralNetworksCompilation_setPreference(mCompilation,
1251 ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER),
1252 ANEURALNETWORKS_BAD_STATE);
1253 EXPECT_EQ(
1254 ANeuralNetworksCompilation_setPriority(mCompilation, ANEURALNETWORKS_PRIORITY_DEFAULT),
1255 ANEURALNETWORKS_BAD_STATE);
1256 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, kShortWaitInNanoseconds),
1257 ANEURALNETWORKS_BAD_STATE);
1258 std::vector<uint8_t> token(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN, 0);
1259 EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, NN_TMP_DIR, token.data()),
1260 ANEURALNETWORKS_BAD_STATE);
1261 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_BAD_STATE);
1262 }
1263
1264 // Also see TEST_F(ValidationTestCompilationForDevices_1, ExecutionSetTimeout)
1265 // Also see TEST_F(ValidationTestCompilationForDevices_2, ExecutionSetTimeout)
TEST_F(ValidationTestCompilation,ExecutionSetTimeout)1266 TEST_F(ValidationTestCompilation, ExecutionSetTimeout) {
1267 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(nullptr, kShortWaitInNanoseconds),
1268 ANEURALNETWORKS_UNEXPECTED_NULL);
1269
1270 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
1271 ANeuralNetworksExecution* execution;
1272 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
1273 // Timeout can only be set on Compilations created from CompilationForDevices with one device
1274 // specified.
1275 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(execution, kShortWaitInNanoseconds),
1276 ANEURALNETWORKS_BAD_DATA);
1277 ANeuralNetworksExecution_free(execution);
1278 }
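
// A minimal sketch of a flow in which setting an execution timeout is accepted: the compilation
// is created for exactly one device. Device index 0 is purely illustrative, and device
// enumeration / feature-level checks are omitted; the timeout itself is only a hint.
//
//     uint32_t numDevices = 0;
//     EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksDevice* device = nullptr;
//     EXPECT_EQ(ANeuralNetworks_getDevice(0, &device), ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksCompilation* compilation = nullptr;
//     EXPECT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, &device, 1, &compilation),
//               ANEURALNETWORKS_NO_ERROR);
//     EXPECT_EQ(ANeuralNetworksCompilation_finish(compilation), ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksExecution* execution = nullptr;
//     EXPECT_EQ(ANeuralNetworksExecution_create(compilation, &execution), ANEURALNETWORKS_NO_ERROR);
//     EXPECT_EQ(ANeuralNetworksExecution_setTimeout(execution, kShortWaitInNanoseconds),
//               ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksExecution_free(execution);
//     ANeuralNetworksCompilation_free(compilation);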
1279
1280 // Also see TEST_F(ValidationTestCompilationForDevices_1, ExecutionTiming)
1281 // Also see TEST_F(ValidationTestCompilationForDevices_2, ExecutionTiming)
TEST_F(ValidationTestCompilation,ExecutionTiming)1282 TEST_F(ValidationTestCompilation, ExecutionTiming) {
1283 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
1284 ANeuralNetworksExecution* execution;
1285 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
1286     // Cannot call setMeasureTiming() with a Compilation rather than a CompilationForDevices.
1287 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, false),
1288 ANEURALNETWORKS_BAD_DATA);
1289 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, true), ANEURALNETWORKS_BAD_DATA);
1290
1291     // Free the execution.
1292 ANeuralNetworksExecution_free(execution);
1293 }
1294
1295 // Also see TEST_F(ValidationTestCompilationForDevices_1, ExecutionUsability)
TEST_F(ValidationTestCompilation,ExecutionUsability)1296 TEST_F(ValidationTestCompilation, ExecutionUsability) {
1297 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
1298
1299 enum class ExecutionType : uint32_t { ASYNC, SYNC, BURST, FENCED };
1300 for (auto executionType :
1301 {ExecutionType::ASYNC, ExecutionType::SYNC, ExecutionType::BURST, ExecutionType::FENCED}) {
1302         for (bool explicitlyDisableReusability : {false, true}) {
1303             SCOPED_TRACE(static_cast<uint32_t>(executionType));
1304             SCOPED_TRACE(explicitlyDisableReusability);
1305
1306 ANeuralNetworksExecution* execution;
1307 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
1308 ANEURALNETWORKS_NO_ERROR);
1309
1310             if (explicitlyDisableReusability) {
1311 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1312 ASSERT_EQ(ANeuralNetworksExecution_setReusable(execution, false),
1313 ANEURALNETWORKS_NO_ERROR);
1314 } else {
1315 ANeuralNetworksExecution_free(execution);
1316 continue;
1317 }
1318 }
1319
1320 // Set inputs and outputs.
1321 float in0[] = {0.0f, 0.0f}, in1[] = {1.0f, 1.0f}, out0[2];
1322 int in2 = 0;
1323 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
1324 ANEURALNETWORKS_NO_ERROR);
1325 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, &in1, sizeof(in1)),
1326 ANEURALNETWORKS_NO_ERROR);
1327 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, &in2, sizeof(in2)),
1328 ANEURALNETWORKS_NO_ERROR);
1329 ASSERT_EQ(
1330 ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0, sizeof(out0)),
1331 ANEURALNETWORKS_NO_ERROR);
1332
1333 const size_t memorySize = std::max(sizeof(in0), sizeof(out0));
1334 #ifdef __ANDROID__
1335 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
1336 #else // __ANDROID__
1337 TemporaryFile tmpFile;
1338 int memoryFd = tmpFile.release();
1339 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
1340 #endif // __ANDROID__
1341 ASSERT_GT(memoryFd, 0);
1342 ANeuralNetworksMemory* memory;
1343 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE,
1344 memoryFd, 0, &memory),
1345 ANEURALNETWORKS_NO_ERROR);
1346
1347 auto testTooLate = [this, execution, &in0, &out0, memory] {
1348                 // Try operations that are not permitted once the execution has started.
1349
1350 // Set loop timeout.
1351 ASSERT_EQ(
1352 ANeuralNetworksExecution_setLoopTimeout(execution, kShortWaitInNanoseconds),
1353 ANEURALNETWORKS_BAD_STATE);
1354
1355 // Enable/Disable input and output padding.
1356 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1357 ASSERT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(execution, true),
1358 ANEURALNETWORKS_BAD_STATE);
1359 ASSERT_EQ(
1360 ANeuralNetworksExecution_enableInputAndOutputPadding(execution, false),
1361 ANEURALNETWORKS_BAD_STATE);
1362 }
1363
1364 // Set inputs and outputs.
1365 ASSERT_EQ(
1366 ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
1367 ANEURALNETWORKS_BAD_STATE);
1368 ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0,
1369 sizeof(out0)),
1370 ANEURALNETWORKS_BAD_STATE);
1371 ASSERT_EQ(ANeuralNetworksExecution_setInputFromMemory(execution, 0, nullptr, memory,
1372 0, sizeof(in0)),
1373 ANEURALNETWORKS_BAD_STATE);
1374 ASSERT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr,
1375 memory, 0, sizeof(out0)),
1376 ANEURALNETWORKS_BAD_STATE);
1377
1378 // Set reusable.
1379 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1380 ASSERT_EQ(ANeuralNetworksExecution_setReusable(execution, true),
1381 ANEURALNETWORKS_BAD_STATE);
1382 ASSERT_EQ(ANeuralNetworksExecution_setReusable(execution, false),
1383 ANEURALNETWORKS_BAD_STATE);
1384 }
1385
1386 // Reuse for asynchronous execution.
1387 {
1388 ANeuralNetworksEvent* event;
1389 ASSERT_EQ(ANeuralNetworksExecution_startCompute(execution, &event),
1390 ANEURALNETWORKS_BAD_STATE);
1391 }
1392
1393 // Reuse for synchronous execution.
1394 ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_BAD_STATE);
1395
1396 // Reuse for burst execution.
1397 {
1398 ANeuralNetworksBurst* burst;
1399 ASSERT_EQ(ANeuralNetworksBurst_create(mCompilation, &burst),
1400 ANEURALNETWORKS_NO_ERROR);
1401 ASSERT_EQ(ANeuralNetworksExecution_burstCompute(execution, burst),
1402 ANEURALNETWORKS_BAD_STATE);
1403 ANeuralNetworksBurst_free(burst);
1404 }
1405
1406 // Reuse for fenced execution.
1407 {
1408 ANeuralNetworksEvent* event;
1409 ASSERT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(
1410 execution, nullptr, 0, 0, &event),
1411 ANEURALNETWORKS_BAD_STATE);
1412 }
1413 };
1414
1415 // Compute.
1416 switch (executionType) {
1417 case ExecutionType::ASYNC: {
1418 ANeuralNetworksEvent* event;
1419 ASSERT_EQ(ANeuralNetworksExecution_startCompute(execution, &event),
1420 ANEURALNETWORKS_NO_ERROR);
1421 testTooLate();
1422 ASSERT_EQ(ANeuralNetworksEvent_wait(event), ANEURALNETWORKS_NO_ERROR);
1423 testTooLate();
1424 ANeuralNetworksEvent_free(event);
1425 break;
1426 }
1427 case ExecutionType::SYNC: {
1428 ASSERT_EQ(ANeuralNetworksExecution_compute(execution),
1429 ANEURALNETWORKS_NO_ERROR);
1430 testTooLate();
1431 break;
1432 }
1433 case ExecutionType::BURST: {
1434 ANeuralNetworksBurst* burst;
1435 ASSERT_EQ(ANeuralNetworksBurst_create(mCompilation, &burst),
1436 ANEURALNETWORKS_NO_ERROR);
1437 ASSERT_EQ(ANeuralNetworksExecution_burstCompute(execution, burst),
1438 ANEURALNETWORKS_NO_ERROR);
1439 testTooLate();
1440 ANeuralNetworksBurst_free(burst);
1441 break;
1442 }
1443 case ExecutionType::FENCED: {
1444 ANeuralNetworksEvent* event;
1445 ASSERT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(
1446 execution, nullptr, 0, 0, &event),
1447 ANEURALNETWORKS_NO_ERROR);
1448 testTooLate();
1449 ASSERT_EQ(ANeuralNetworksEvent_wait(event), ANEURALNETWORKS_NO_ERROR);
1450 testTooLate();
1451 ANeuralNetworksEvent_free(event);
1452 break;
1453 }
1454 default:
1455 FAIL() << "Unreachable";
1456 }
1457
1458 // close memory
1459 ANeuralNetworksExecution_free(execution);
1460 ANeuralNetworksMemory_free(memory);
1461 close(memoryFd);
1462 }
1463 }
1464 }
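
// For context, a reusable execution (ANeuralNetworksExecution_setReusable(execution, true)) may
// be computed again once the previous computation has completed. A minimal sketch, assuming
// feature level 5 is available and the execution's inputs and outputs are already set:
//
//     ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_NO_ERROR);
//     // A second, sequential computation on the same reusable execution is expected to succeed.
//     ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_NO_ERROR);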
1465
testConcurrentExecution(bool reusable,ANeuralNetworksCompilation * compilation)1466 static void testConcurrentExecution(bool reusable, ANeuralNetworksCompilation* compilation) {
1467 ASSERT_EQ(ANeuralNetworksCompilation_finish(compilation), ANEURALNETWORKS_NO_ERROR);
1468
1469 enum class ExecutionType : uint32_t { ASYNC, SYNC, BURST, FENCED };
1470 const auto compute = [compilation](ExecutionType executionType,
1471 ANeuralNetworksExecution* execution) -> int {
1472 switch (executionType) {
1473 case ExecutionType::ASYNC: {
1474 ANeuralNetworksEvent* event;
1475 int result = ANeuralNetworksExecution_startCompute(execution, &event);
1476 if (result == ANEURALNETWORKS_NO_ERROR) {
1477 result = ANeuralNetworksEvent_wait(event);
1478 }
1479 ANeuralNetworksEvent_free(event);
1480 return result;
1481 }
1482 case ExecutionType::SYNC: {
1483 return ANeuralNetworksExecution_compute(execution);
1484 }
1485 case ExecutionType::BURST: {
1486 ANeuralNetworksBurst* burst;
1487 int result = ANeuralNetworksBurst_create(compilation, &burst);
1488 if (result == ANEURALNETWORKS_NO_ERROR) {
1489 result = ANeuralNetworksExecution_burstCompute(execution, burst);
1490 }
1491 ANeuralNetworksBurst_free(burst);
1492 return result;
1493 }
1494 case ExecutionType::FENCED: {
1495 ANeuralNetworksEvent* event;
1496 int result = ANeuralNetworksExecution_startComputeWithDependencies(
1497 execution, nullptr, 0, 0, &event);
1498 if (result == ANEURALNETWORKS_NO_ERROR) {
1499 result = ANeuralNetworksEvent_wait(event);
1500 }
1501 ANeuralNetworksEvent_free(event);
1502 return result;
1503 }
1504 }
1505 };
1506
1507 const std::vector<ExecutionType> kExecutionTypes = {
1508 ExecutionType::ASYNC, ExecutionType::SYNC, ExecutionType::BURST, ExecutionType::FENCED};
1509 for (auto executionType1 : kExecutionTypes) {
1510 for (auto executionType2 : kExecutionTypes) {
1511 SCOPED_TRACE(static_cast<uint32_t>(executionType1));
1512 SCOPED_TRACE(static_cast<uint32_t>(executionType2));
1513
1514 ANeuralNetworksExecution* execution;
1515 ASSERT_EQ(ANeuralNetworksExecution_create(compilation, &execution),
1516 ANEURALNETWORKS_NO_ERROR);
1517
1518 float in0[] = {0.0f, 0.0f}, in1[] = {1.0f, 1.0f}, out0[2];
1519 int in2 = 0;
1520 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
1521 ANEURALNETWORKS_NO_ERROR);
1522 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, &in1, sizeof(in1)),
1523 ANEURALNETWORKS_NO_ERROR);
1524 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, &in2, sizeof(in2)),
1525 ANEURALNETWORKS_NO_ERROR);
1526 ASSERT_EQ(
1527 ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0, sizeof(out0)),
1528 ANEURALNETWORKS_NO_ERROR);
1529 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1530 ASSERT_EQ(ANeuralNetworksExecution_setReusable(execution, reusable),
1531 ANEURALNETWORKS_NO_ERROR);
1532 } else {
1533 if (reusable) {
1534 ANeuralNetworksExecution_free(execution);
1535 return;
1536 }
1537 }
1538
1539 // Compute on the same execution concurrently.
1540 auto first = std::async(std::launch::async, [compute, executionType1, execution] {
1541 return compute(executionType1, execution);
1542 });
1543 auto second = std::async(std::launch::async, [compute, executionType2, execution] {
1544 return compute(executionType2, execution);
1545 });
1546 const int result1 = first.get();
1547 const int result2 = second.get();
1548
1549 // At least one result must be ANEURALNETWORKS_NO_ERROR. One may return
1550 // ANEURALNETWORKS_BAD_STATE if the other is already executing.
1551 EXPECT_TRUE(result1 == ANEURALNETWORKS_BAD_STATE ||
1552 result1 == ANEURALNETWORKS_NO_ERROR);
1553 EXPECT_TRUE(result2 == ANEURALNETWORKS_BAD_STATE ||
1554 result2 == ANEURALNETWORKS_NO_ERROR);
1555 EXPECT_TRUE(result1 == ANEURALNETWORKS_NO_ERROR || result2 == ANEURALNETWORKS_NO_ERROR);
1556
1557 // If the execution is not reusable, one result must be ANEURALNETWORKS_BAD_STATE.
1558 if (!reusable) {
1559 EXPECT_TRUE(result1 == ANEURALNETWORKS_BAD_STATE ||
1560 result2 == ANEURALNETWORKS_BAD_STATE);
1561 }
1562
1563 ANeuralNetworksExecution_free(execution);
1564 }
1565 }
1566 }
1567
1568 // Also see TEST_F(ValidationTestBurst, BurstComputeConcurrent)
TEST_F(ValidationTestCompilation,ReusableExecutionConcurrent)1569 TEST_F(ValidationTestCompilation, ReusableExecutionConcurrent) {
1570 testConcurrentExecution(/*reusable=*/true, mCompilation);
1571 }
TEST_F(ValidationTestCompilation,NonReusableExecutionConcurrent)1572 TEST_F(ValidationTestCompilation, NonReusableExecutionConcurrent) {
1573 testConcurrentExecution(/*reusable=*/false, mCompilation);
1574 }
1575
TEST_F(ValidationTestExecution,SetLoopTimeout)1576 TEST_F(ValidationTestExecution, SetLoopTimeout) {
1577 EXPECT_EQ(ANeuralNetworksExecution_setLoopTimeout(nullptr, kShortWaitInNanoseconds),
1578 ANEURALNETWORKS_UNEXPECTED_NULL);
1579 }
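
// On an execution created from a finished compilation (the hypothetical `execution` below), the
// loop timeout bounds how long WHILE loops in the model may run. A minimal sketch, clamping the
// hint to the platform-reported maximum:
//
//     const uint64_t loopTimeout =
//             std::min(kShortWaitInNanoseconds, ANeuralNetworks_getMaximumLoopTimeout());
//     EXPECT_EQ(ANeuralNetworksExecution_setLoopTimeout(execution, loopTimeout),
//               ANEURALNETWORKS_NO_ERROR);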
1580
TEST_F(ValidationTestExecution,EnableInputAndOutputPadding)1581 TEST_F(ValidationTestExecution, EnableInputAndOutputPadding) {
1582 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1583 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(nullptr, true),
1584 ANEURALNETWORKS_UNEXPECTED_NULL);
1585 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(nullptr, false),
1586 ANEURALNETWORKS_UNEXPECTED_NULL);
1587 } else {
1588 GTEST_SKIP();
1589 }
1590 }
1591
TEST_F(ValidationTestExecution,ExecutionSetReusable)1592 TEST_F(ValidationTestExecution, ExecutionSetReusable) {
1593 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1594 EXPECT_EQ(ANeuralNetworksExecution_setReusable(nullptr, true),
1595 ANEURALNETWORKS_UNEXPECTED_NULL);
1596 EXPECT_EQ(ANeuralNetworksExecution_setReusable(nullptr, false),
1597 ANEURALNETWORKS_UNEXPECTED_NULL);
1598 } else {
1599 GTEST_SKIP();
1600 }
1601 }
1602
TEST_F(ValidationTestExecution,SetInput)1603 TEST_F(ValidationTestExecution, SetInput) {
1604 char buffer[20];
1605 EXPECT_EQ(ANeuralNetworksExecution_setInput(nullptr, 0, nullptr, buffer, sizeof(float)),
1606 ANEURALNETWORKS_UNEXPECTED_NULL);
1607 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, nullptr, sizeof(float)),
1608 ANEURALNETWORKS_UNEXPECTED_NULL);
1609
1610     // This should fail, because the length does not match the operand size (8 bytes).
1611 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, buffer, 20),
1612 ANEURALNETWORKS_BAD_DATA);
1613
1614 // This should fail, as this operand does not exist.
1615 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 999, nullptr, buffer, sizeof(float)),
1616 ANEURALNETWORKS_BAD_DATA);
1617
1618 // This should fail, as this operand does not exist.
1619 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, -1, nullptr, buffer, sizeof(float)),
1620 ANEURALNETWORKS_BAD_DATA);
1621
1622 // These should fail, because the tensor types are invalid.
1623 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, &kInvalidTensorType1, buffer,
1624 sizeof(float)),
1625 ANEURALNETWORKS_BAD_DATA);
1626 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, &kInvalidTensorType2, buffer,
1627 sizeof(float)),
1628 ANEURALNETWORKS_BAD_DATA);
1629
1630 // Cannot do this twice.
1631 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, buffer, 8),
1632 ANEURALNETWORKS_NO_ERROR);
1633 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, buffer, 8),
1634 ANEURALNETWORKS_BAD_STATE);
1635 }
1636
TEST_F(ValidationTestExecution,SetInputEnablePadding)1637 TEST_F(ValidationTestExecution, SetInputEnablePadding) {
1638 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1639 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
1640 ANEURALNETWORKS_NO_ERROR);
1641
1642 // This should fail, because length is less than the size of a float32.
1643 char buffer[20];
1644 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, buffer,
1645 sizeof(float) - 1),
1646 ANEURALNETWORKS_BAD_DATA);
1647 } else {
1648 GTEST_SKIP();
1649 }
1650 }
1651
TEST_F(ValidationTestExecution,SetOutput)1652 TEST_F(ValidationTestExecution, SetOutput) {
1653 char buffer[20];
1654 EXPECT_EQ(ANeuralNetworksExecution_setOutput(nullptr, 0, nullptr, buffer, sizeof(float)),
1655 ANEURALNETWORKS_UNEXPECTED_NULL);
1656 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, nullptr, sizeof(float)),
1657 ANEURALNETWORKS_UNEXPECTED_NULL);
1658
1659     // This should fail, because the length does not match the operand size (8 bytes).
1660 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, buffer, 20),
1661 ANEURALNETWORKS_BAD_DATA);
1662
1663 // This should fail, as this operand does not exist.
1664 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 999, nullptr, buffer, sizeof(float)),
1665 ANEURALNETWORKS_BAD_DATA);
1666
1667 // This should fail, as this operand does not exist.
1668 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, -1, nullptr, buffer, sizeof(float)),
1669 ANEURALNETWORKS_BAD_DATA);
1670
1671 // These should fail, because the tensor types are invalid.
1672 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, &kInvalidTensorType1, buffer,
1673 sizeof(float)),
1674 ANEURALNETWORKS_BAD_DATA);
1675 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, &kInvalidTensorType2, buffer,
1676 sizeof(float)),
1677 ANEURALNETWORKS_BAD_DATA);
1678
1679 // Cannot do this twice.
1680 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, buffer, 8),
1681 ANEURALNETWORKS_NO_ERROR);
1682 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, buffer, 8),
1683 ANEURALNETWORKS_BAD_STATE);
1684 }
1685
TEST_F(ValidationTestExecution,SetOutputEnablePadding)1686 TEST_F(ValidationTestExecution, SetOutputEnablePadding) {
1687 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1688 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
1689 ANEURALNETWORKS_NO_ERROR);
1690
1691 // This should fail, because length is less than the size of a float32.
1692 char buffer[20];
1693 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, buffer,
1694 sizeof(float) - 1),
1695 ANEURALNETWORKS_BAD_DATA);
1696 } else {
1697 GTEST_SKIP();
1698 }
1699 }
1700
TEST_F(ValidationTestExecution,SetInputFromMemory)1701 TEST_F(ValidationTestExecution, SetInputFromMemory) {
1702 const size_t memorySize = 20;
1703 #ifdef __ANDROID__
1704 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
1705 #else // __ANDROID__
1706 TemporaryFile tmpFile;
1707 int memoryFd = tmpFile.release();
1708 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
1709 #endif // __ANDROID__
1710 ASSERT_GT(memoryFd, 0);
1711
1712 ANeuralNetworksMemory* memory;
1713 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
1714 &memory),
1715 ANEURALNETWORKS_NO_ERROR);
1716
1717 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(nullptr, 0, nullptr, memory, 0,
1718 sizeof(float)),
1719 ANEURALNETWORKS_UNEXPECTED_NULL);
1720 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, nullptr, 0,
1721 sizeof(float)),
1722 ANEURALNETWORKS_UNEXPECTED_NULL);
1723
1724 // This should fail, because the operand does not exist.
1725 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 999, nullptr, memory, 0,
1726 sizeof(float)),
1727 ANEURALNETWORKS_BAD_DATA);
1728
1729 // This should fail, because the operand does not exist.
1730 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, -1, nullptr, memory, 0,
1731 sizeof(float)),
1732 ANEURALNETWORKS_BAD_DATA);
1733
1734     // This should fail, because the length does not match the operand size (8 bytes).
1735 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0,
1736 memorySize),
1737 ANEURALNETWORKS_BAD_DATA);
1738
1739 // This should fail, because offset is larger than memorySize.
1740 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory,
1741 memorySize + 1, sizeof(float)),
1742 ANEURALNETWORKS_BAD_DATA);
1743
1744 // This should fail, because requested size is larger than the memory.
1745 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory,
1746 memorySize - 3, sizeof(float)),
1747 ANEURALNETWORKS_BAD_DATA);
1748
1749 // These should fail, because the tensor types are invalid.
1750 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, &kInvalidTensorType1,
1751 memory, 0, sizeof(float)),
1752 ANEURALNETWORKS_BAD_DATA);
1753 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, &kInvalidTensorType2,
1754 memory, 0, sizeof(float)),
1755 ANEURALNETWORKS_BAD_DATA);
1756
1757 // Cannot do this twice.
1758 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0, 8),
1759 ANEURALNETWORKS_NO_ERROR);
1760 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0, 8),
1761 ANEURALNETWORKS_BAD_STATE);
1762 char buffer[memorySize];
1763 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, buffer, 8),
1764 ANEURALNETWORKS_BAD_STATE);
1765
1766 // close memory
1767 ANeuralNetworksMemory_free(memory);
1768 close(memoryFd);
1769 }
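
// Note on the FromMemory variants: ANeuralNetworksMemory_createFromFd(size, protect, fd, offset,
// &memory) wraps `size` bytes of `fd` starting at `offset`; setInputFromMemory then addresses a
// sub-range of that memory, so the sub-range must lie within it and, unless padding is enabled,
// its length must match the operand size. A minimal sketch of a valid binding for operand 0
// (8 bytes) of the model used here:
//
//     EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory,
//                                                           /*offset=*/0, /*length=*/8),
//               ANEURALNETWORKS_NO_ERROR);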
1770
TEST_F(ValidationTestExecution,SetInputFromMemoryEnablePadding)1771 TEST_F(ValidationTestExecution, SetInputFromMemoryEnablePadding) {
1772 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1773 const size_t memorySize = 20;
1774 #ifdef __ANDROID__
1775 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
1776 #else // __ANDROID__
1777 TemporaryFile tmpFile;
1778 int memoryFd = tmpFile.release();
1779 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
1780 #endif // __ANDROID__
1781 ASSERT_GT(memoryFd, 0);
1782
1783 ANeuralNetworksMemory* memory;
1784 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd,
1785 0, &memory),
1786 ANEURALNETWORKS_NO_ERROR);
1787
1788 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
1789 ANEURALNETWORKS_NO_ERROR);
1790
1791 // This should fail, because length is less than the size of a float32.
1792 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0,
1793 sizeof(float) - 1),
1794 ANEURALNETWORKS_BAD_DATA);
1795
1796 // close memory
1797 ANeuralNetworksMemory_free(memory);
1798 close(memoryFd);
1799 } else {
1800 GTEST_SKIP();
1801 }
1802 }
1803
1804 #ifdef __ANDROID__
TEST_F(ValidationTestExecution,SetInputFromAHardwareBufferBlob)1805 TEST_F(ValidationTestExecution, SetInputFromAHardwareBufferBlob) {
1806 const size_t memorySize = 20;
1807
1808 AHardwareBuffer_Desc desc{
1809 .width = memorySize,
1810 .height = 1,
1811 .layers = 1,
1812 .format = AHARDWAREBUFFER_FORMAT_BLOB,
1813 .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
1814 };
1815
1816 AHardwareBuffer* buffer = nullptr;
1817 ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);
1818
1819 ANeuralNetworksMemory* memory;
1820 EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
1821 ANEURALNETWORKS_NO_ERROR);
1822
1823     // This should fail, because the length does not match the operand size (8 bytes).
1824 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0,
1825 memorySize),
1826 ANEURALNETWORKS_BAD_DATA);
1827
1828 // This should fail, because offset is larger than memorySize.
1829 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory,
1830 memorySize + 1, sizeof(float)),
1831 ANEURALNETWORKS_BAD_DATA);
1832 // This should fail, because requested size is larger than the memory.
1833 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory,
1834 memorySize - 3, sizeof(float)),
1835 ANEURALNETWORKS_BAD_DATA);
1836
1837 // These should fail, because the tensor types are invalid.
1838 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, &kInvalidTensorType1,
1839 memory, 0, sizeof(float)),
1840 ANEURALNETWORKS_BAD_DATA);
1841 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, &kInvalidTensorType2,
1842 memory, 0, sizeof(float)),
1843 ANEURALNETWORKS_BAD_DATA);
1844
1845 // close memory
1846 ANeuralNetworksMemory_free(memory);
1847 AHardwareBuffer_release(buffer);
1848 }
1849
TEST_F(ValidationTestExecution,SetInputFromAHardwareBufferBlobEnablePadding)1850 TEST_F(ValidationTestExecution, SetInputFromAHardwareBufferBlobEnablePadding) {
1851 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1852 const size_t memorySize = 20;
1853
1854 AHardwareBuffer_Desc desc{
1855 .width = memorySize,
1856 .height = 1,
1857 .layers = 1,
1858 .format = AHARDWAREBUFFER_FORMAT_BLOB,
1859 .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
1860 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
1861 };
1862
1863 AHardwareBuffer* buffer = nullptr;
1864 ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);
1865
1866 ANeuralNetworksMemory* memory;
1867 EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
1868 ANEURALNETWORKS_NO_ERROR);
1869
1870 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
1871 ANEURALNETWORKS_NO_ERROR);
1872
1873 // This should fail, because length is less than the size of a float32.
1874 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0,
1875 sizeof(float) - 1),
1876 ANEURALNETWORKS_BAD_DATA);
1877
1878 // close memory
1879 ANeuralNetworksMemory_free(memory);
1880 AHardwareBuffer_release(buffer);
1881 } else {
1882 GTEST_SKIP();
1883 }
1884 }
1885 #endif // __ANDROID__
1886
TEST_F(ValidationTestExecution,SetOutputFromMemory)1887 TEST_F(ValidationTestExecution, SetOutputFromMemory) {
1888 ANeuralNetworksExecution* execution;
1889 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
1890
1891 const size_t memorySize = 20;
1892 #ifdef __ANDROID__
1893 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
1894 #else // __ANDROID__
1895 TemporaryFile tmpFile;
1896 int memoryFd = tmpFile.release();
1897 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
1898 #endif // __ANDROID__
1899 ASSERT_GT(memoryFd, 0);
1900
1901 ANeuralNetworksMemory* memory;
1902 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
1903 &memory),
1904 ANEURALNETWORKS_NO_ERROR);
1905
1906 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(nullptr, 0, nullptr, memory, 0,
1907 sizeof(float)),
1908 ANEURALNETWORKS_UNEXPECTED_NULL);
1909 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, nullptr, 0,
1910 sizeof(float)),
1911 ANEURALNETWORKS_UNEXPECTED_NULL);
1912
1913 // This should fail, because the operand does not exist.
1914 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 999, nullptr, memory, 0,
1915 sizeof(float)),
1916 ANEURALNETWORKS_BAD_DATA);
1917
1918 // This should fail, because the operand does not exist.
1919 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, -1, nullptr, memory, 0,
1920 sizeof(float)),
1921 ANEURALNETWORKS_BAD_DATA);
1922
1923     // This should fail, because the length does not match the operand size (8 bytes).
1924 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0,
1925 memorySize),
1926 ANEURALNETWORKS_BAD_DATA);
1927
1928 // This should fail, because offset is larger than memorySize.
1929 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory,
1930 memorySize + 1, sizeof(float)),
1931 ANEURALNETWORKS_BAD_DATA);
1932
1933 // This should fail, because requested size is larger than the memory.
1934 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory,
1935 memorySize - 3, sizeof(float)),
1936 ANEURALNETWORKS_BAD_DATA);
1937
1938 // These should fail, because the tensor types are invalid.
1939 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, &kInvalidTensorType1,
1940 memory, 0, sizeof(float)),
1941 ANEURALNETWORKS_BAD_DATA);
1942 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, &kInvalidTensorType2,
1943 memory, 0, sizeof(float)),
1944 ANEURALNETWORKS_BAD_DATA);
1945
1946 // Cannot do this twice.
1947 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0, 8),
1948 ANEURALNETWORKS_NO_ERROR);
1949 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0, 8),
1950 ANEURALNETWORKS_BAD_STATE);
1951 char buffer[memorySize];
1952 EXPECT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, buffer, 8),
1953 ANEURALNETWORKS_BAD_STATE);
1954
1955 // close memory
1956 ANeuralNetworksMemory_free(memory);
1957 ANeuralNetworksExecution_free(execution);
1958 close(memoryFd);
1959 }
1960
TEST_F(ValidationTestExecution,SetOutputFromMemoryEnablePadding)1961 TEST_F(ValidationTestExecution, SetOutputFromMemoryEnablePadding) {
1962 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
1963 ANeuralNetworksExecution* execution;
1964 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
1965 ANEURALNETWORKS_NO_ERROR);
1966
1967 const size_t memorySize = 20;
1968 #ifdef __ANDROID__
1969 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
1970 #else // __ANDROID__
1971 TemporaryFile tmpFile;
1972 int memoryFd = tmpFile.release();
1973 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
1974 #endif // __ANDROID__
1975 ASSERT_GT(memoryFd, 0);
1976
1977 ANeuralNetworksMemory* memory;
1978 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd,
1979 0, &memory),
1980 ANEURALNETWORKS_NO_ERROR);
1981
1982 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
1983 ANEURALNETWORKS_NO_ERROR);
1984
1985 // This should fail, because length is less than the size of a float32.
1986 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0,
1987 sizeof(float) - 1),
1988 ANEURALNETWORKS_BAD_DATA);
1989
1990 // close memory
1991 ANeuralNetworksMemory_free(memory);
1992 ANeuralNetworksExecution_free(execution);
1993 close(memoryFd);
1994 } else {
1995 GTEST_SKIP();
1996 }
1997 }
1998
1999 #ifdef __ANDROID__
TEST_F(ValidationTestExecution,SetOutputFromAHardwareBufferBlob)2000 TEST_F(ValidationTestExecution, SetOutputFromAHardwareBufferBlob) {
2001 const size_t memorySize = 20;
2002
2003 AHardwareBuffer_Desc desc{
2004 .width = memorySize,
2005 .height = 1,
2006 .layers = 1,
2007 .format = AHARDWAREBUFFER_FORMAT_BLOB,
2008 .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
2009 };
2010
2011 AHardwareBuffer* buffer = nullptr;
2012 ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);
2013
2014 ANeuralNetworksMemory* memory;
2015 EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
2016 ANEURALNETWORKS_NO_ERROR);
2017
2018     // This should fail, because the length does not match the operand size (8 bytes).
2019 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory, 0,
2020 memorySize),
2021 ANEURALNETWORKS_BAD_DATA);
2022
2023 // This should fail, because offset is larger than memorySize.
2024 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory,
2025 memorySize + 1, sizeof(float)),
2026 ANEURALNETWORKS_BAD_DATA);
2027
2028 // This should fail, because requested size is larger than the memory.
2029 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory,
2030 memorySize - 3, sizeof(float)),
2031 ANEURALNETWORKS_BAD_DATA);
2032
2033 // These should fail, because the tensor types are invalid.
2034 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, &kInvalidTensorType1,
2035 memory, 0, sizeof(float)),
2036 ANEURALNETWORKS_BAD_DATA);
2037 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, &kInvalidTensorType2,
2038 memory, 0, sizeof(float)),
2039 ANEURALNETWORKS_BAD_DATA);
2040
2041 // close memory
2042 ANeuralNetworksMemory_free(memory);
2043 AHardwareBuffer_release(buffer);
2044 }
2045
TEST_F(ValidationTestExecution,SetOutputFromAHardwareBufferBlobEnablePadding)2046 TEST_F(ValidationTestExecution, SetOutputFromAHardwareBufferBlobEnablePadding) {
2047 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
2048 const size_t memorySize = 20;
2049
2050 AHardwareBuffer_Desc desc{
2051 .width = memorySize,
2052 .height = 1,
2053 .layers = 1,
2054 .format = AHARDWAREBUFFER_FORMAT_BLOB,
2055 .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
2056 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
2057 };
2058
2059 AHardwareBuffer* buffer = nullptr;
2060 ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);
2061
2062 ANeuralNetworksMemory* memory;
2063 EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, &memory),
2064 ANEURALNETWORKS_NO_ERROR);
2065
2066 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(mExecution, true),
2067 ANEURALNETWORKS_NO_ERROR);
2068
2069 // This should fail, because length is less than the size of a float32.
2070 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory, 0,
2071 sizeof(float) - 1),
2072 ANEURALNETWORKS_BAD_DATA);
2073
2074 // close memory
2075 ANeuralNetworksMemory_free(memory);
2076 AHardwareBuffer_release(buffer);
2077 } else {
2078 GTEST_SKIP();
2079 }
2080 }
2081 #endif // __ANDROID__
2082
TEST_F(ValidationTestExecution,EnablePaddingAfterSetInputOutput)2083 TEST_F(ValidationTestExecution, EnablePaddingAfterSetInputOutput) {
2084 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
2085 ANeuralNetworksExecution* execution;
2086 char buffer[20];
2087 const size_t memorySize = 20;
2088 #ifdef __ANDROID__
2089 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
2090 #else // __ANDROID__
2091 TemporaryFile tmpFile;
2092 int memoryFd = tmpFile.release();
2093 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
2094 #endif // __ANDROID__
2095 ASSERT_GT(memoryFd, 0);
2096
2097 ANeuralNetworksMemory* memory;
2098 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd,
2099 0, &memory),
2100 ANEURALNETWORKS_NO_ERROR);
2101
2102 // Enable padding after setInput.
2103 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
2104 ANEURALNETWORKS_NO_ERROR);
2105 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, buffer, 8),
2106 ANEURALNETWORKS_NO_ERROR);
2107 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(execution, true),
2108 ANEURALNETWORKS_BAD_STATE);
2109 ANeuralNetworksExecution_free(execution);
2110
2111 // Enable padding after setInputFromMemory.
2112 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
2113 ANEURALNETWORKS_NO_ERROR);
2114 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(execution, 0, nullptr, memory, 0, 8),
2115 ANEURALNETWORKS_NO_ERROR);
2116 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(execution, true),
2117 ANEURALNETWORKS_BAD_STATE);
2118 ANeuralNetworksExecution_free(execution);
2119
2120 // Enable padding after setOutput.
2121 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
2122 ANEURALNETWORKS_NO_ERROR);
2123 EXPECT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, buffer, 8),
2124 ANEURALNETWORKS_NO_ERROR);
2125 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(execution, true),
2126 ANEURALNETWORKS_BAD_STATE);
2127 ANeuralNetworksExecution_free(execution);
2128
2129 // Enable padding after setOutputFromMemory.
2130 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
2131 ANEURALNETWORKS_NO_ERROR);
2132 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0, 8),
2133 ANEURALNETWORKS_NO_ERROR);
2134 EXPECT_EQ(ANeuralNetworksExecution_enableInputAndOutputPadding(execution, true),
2135 ANEURALNETWORKS_BAD_STATE);
2136 ANeuralNetworksExecution_free(execution);
2137
2138 // close memory
2139 ANeuralNetworksMemory_free(memory);
2140 close(memoryFd);
2141 } else {
2142 GTEST_SKIP();
2143 }
2144 }
2145
TEST_F(ValidationTestExecutionDeviceMemory,SetInputFromMemory)2146 TEST_F(ValidationTestExecutionDeviceMemory, SetInputFromMemory) {
2147 ANeuralNetworksMemoryDesc* desc;
2148 ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
2149 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, mCompilation, 0, 1.0f),
2150 ANEURALNETWORKS_NO_ERROR);
2151
2152 // The following output roles are for init/deinit of the device memory.
2153 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mInitCompilation, 0, 1.0f),
2154 ANEURALNETWORKS_NO_ERROR);
2155 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mDeinitCompilation, 0, 1.0f),
2156 ANEURALNETWORKS_NO_ERROR);
2157
2158 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
2159
2160 ANeuralNetworksMemory* memory;
2161 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &memory), ANEURALNETWORKS_NO_ERROR);
2162 ANeuralNetworksMemoryDesc_free(desc);
2163
2164 // Uninitialized memory as input.
2165 executeWithMemoryAsInput(mCompilation, memory, ANEURALNETWORKS_OP_FAILED);
2166
2167 // The memory is deinitialized between setInputFromMemory and compute.
2168 {
2169 // Initialize device memory.
2170 executeWithMemoryAsOutput(mInitCompilation, memory, ANEURALNETWORKS_NO_ERROR);
2171
2172 float data = 0;
2173 ANeuralNetworksExecution* execution = nullptr;
2174 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
2175 ANEURALNETWORKS_NO_ERROR);
2176 ASSERT_EQ(ANeuralNetworksExecution_setInputFromMemory(execution, 0, nullptr, memory, 0, 0),
2177 ANEURALNETWORKS_NO_ERROR);
2178 ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &data, sizeof(float)),
2179 ANEURALNETWORKS_NO_ERROR);
2180
2181 // Deinitialize device memory.
2182 executeWithMemoryAsOutput(mDeinitCompilation, memory, ANEURALNETWORKS_OP_FAILED);
2183
2184 // Uninitialized memory as input at compute time.
2185 ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_OP_FAILED);
2186 ANeuralNetworksExecution_free(execution);
2187 }
2188
2189 // Initialize device memory.
2190 executeWithMemoryAsOutput(mInitCompilation, memory, ANEURALNETWORKS_NO_ERROR);
2191
2192 // Bad offset and length.
2193 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 1, 0),
2194 ANEURALNETWORKS_BAD_DATA);
2195 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0,
2196 sizeof(float)),
2197 ANEURALNETWORKS_BAD_DATA);
2198
2199 // Bad usage -- not configured for this role.
2200 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory, 0, 0),
2201 ANEURALNETWORKS_BAD_DATA);
2202
2203 // Deinitialize device memory.
2204 executeWithMemoryAsOutput(mDeinitCompilation, memory, ANEURALNETWORKS_OP_FAILED);
2205
2206 // Uninitialized memory as input.
2207 executeWithMemoryAsInput(mCompilation, memory, ANEURALNETWORKS_OP_FAILED);
2208
2209 ANeuralNetworksMemory_free(memory);
2210 }
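
// For device memories created from an ANeuralNetworksMemoryDesc, the whole memory is always
// bound, so both offset and length must be 0, as in the successful calls above. A minimal sketch:
//
//     ASSERT_EQ(ANeuralNetworksExecution_setInputFromMemory(execution, 0, nullptr, memory,
//                                                           /*offset=*/0, /*length=*/0),
//               ANEURALNETWORKS_NO_ERROR);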
2211
TEST_F(ValidationTestExecutionDeviceMemory,SetOutputFromMemory)2212 TEST_F(ValidationTestExecutionDeviceMemory, SetOutputFromMemory) {
2213 ANeuralNetworksMemoryDesc* desc;
2214 ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
2215 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mCompilation, 0, 1.0f),
2216 ANEURALNETWORKS_NO_ERROR);
2217 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
2218
2219 ANeuralNetworksMemory* memory;
2220 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &memory), ANEURALNETWORKS_NO_ERROR);
2221 ANeuralNetworksMemoryDesc_free(desc);
2222
2223 // Bad offset and length.
2224 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory, 1, 0),
2225 ANEURALNETWORKS_BAD_DATA);
2226 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecution, 0, nullptr, memory, 0,
2227 sizeof(float)),
2228 ANEURALNETWORKS_BAD_DATA);
2229
2230 // Bad usage -- not configured for this role.
2231 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecution, 0, nullptr, memory, 0, 0),
2232 ANEURALNETWORKS_BAD_DATA);
2233
2234 ANeuralNetworksMemory_free(memory);
2235 }
2236
TEST_F(ValidationTestExecutionDeviceMemory,SetInputFromMemory_DynamicShape)2237 TEST_F(ValidationTestExecutionDeviceMemory, SetInputFromMemory_DynamicShape) {
2238 uint32_t dimension = 1, badDimension = 2;
2239 ANeuralNetworksOperandType badType = {
2240 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2241 .dimensionCount = 1,
2242 .dimensions = &badDimension,
2243 };
2244
2245 ANeuralNetworksMemoryDesc* desc;
2246 ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
2247 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, mCompilationDynamic, 0, 1.0f),
2248 ANEURALNETWORKS_NO_ERROR);
2249 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(desc, 1, &dimension),
2250 ANEURALNETWORKS_NO_ERROR);
2251
2252 // The following output role is for init of the device memory.
2253 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mInitCompilation, 0, 1.0f),
2254 ANEURALNETWORKS_NO_ERROR);
2255
2256 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
2257
2258 ANeuralNetworksMemory* memory;
2259 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &memory), ANEURALNETWORKS_NO_ERROR);
2260 ANeuralNetworksMemoryDesc_free(desc);
2261
2262 // Initialize device memory.
2263 executeWithMemoryAsOutput(mInitCompilation, memory, ANEURALNETWORKS_NO_ERROR);
2264
2265 // Incompatible dimensions between updated type and memory.
2266 EXPECT_EQ(ANeuralNetworksExecution_setInputFromMemory(mExecutionDynamic, 0, &badType, memory, 0,
2267 0),
2268 ANEURALNETWORKS_BAD_DATA);
2269
2270 ANeuralNetworksMemory_free(memory);
2271 }
2272
TEST_F(ValidationTestExecutionDeviceMemory,SetOutputFromMemory_DynamicShape)2273 TEST_F(ValidationTestExecutionDeviceMemory, SetOutputFromMemory_DynamicShape) {
2274 uint32_t dimension = 1, badDimension = 2;
2275 ANeuralNetworksOperandType badType = {
2276 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2277 .dimensionCount = 1,
2278 .dimensions = &badDimension,
2279 };
2280
2281 ANeuralNetworksMemoryDesc* desc;
2282 ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
2283 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mCompilationDynamic, 0, 1.0f),
2284 ANEURALNETWORKS_NO_ERROR);
2285 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(desc, 1, &dimension),
2286 ANEURALNETWORKS_NO_ERROR);
2287 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
2288
2289 ANeuralNetworksMemory* memory;
2290 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &memory), ANEURALNETWORKS_NO_ERROR);
2291 ANeuralNetworksMemoryDesc_free(desc);
2292
2293 // Incompatible dimensions between updated type and memory.
2294 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(mExecutionDynamic, 0, &badType, memory,
2295 0, 0),
2296 ANEURALNETWORKS_BAD_DATA);
2297
2298 ANeuralNetworksMemory_free(memory);
2299 }
2300
TEST_F(ValidationTestExecution,Compute)2301 TEST_F(ValidationTestExecution, Compute) {
2302 EXPECT_EQ(ANeuralNetworksExecution_compute(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2303 }
2304
TEST_F(ValidationTestExecution,StartCompute)2305 TEST_F(ValidationTestExecution, StartCompute) {
2306 ANeuralNetworksExecution* execution;
2307 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
2308
2309 ANeuralNetworksEvent* event;
2310 EXPECT_EQ(ANeuralNetworksExecution_startCompute(nullptr, &event),
2311 ANEURALNETWORKS_UNEXPECTED_NULL);
2312 EXPECT_EQ(ANeuralNetworksExecution_startCompute(execution, nullptr),
2313 ANEURALNETWORKS_UNEXPECTED_NULL);
2314
2315     // Free the execution.
2316 ANeuralNetworksExecution_free(execution);
2317 }
2318
TEST_F(ValidationTestExecution,EventWait)2319 TEST_F(ValidationTestExecution, EventWait) {
2320 EXPECT_EQ(ANeuralNetworksEvent_wait(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2321 }
2322
TEST_F(ValidationTest,EventCreateFromSyncFenceFd)2323 TEST_F(ValidationTest, EventCreateFromSyncFenceFd) {
2324 ANeuralNetworksEvent* event;
2325 EXPECT_EQ(ANeuralNetworksEvent_createFromSyncFenceFd(-1, &event), ANEURALNETWORKS_BAD_DATA);
2326 EXPECT_EQ(ANeuralNetworksEvent_createFromSyncFenceFd(1, nullptr),
2327 ANEURALNETWORKS_UNEXPECTED_NULL);
2328 }
2329
TEST_F(ValidationTest,EventGetSyncFenceFd)2330 TEST_F(ValidationTest, EventGetSyncFenceFd) {
2331 int syncFd = -100;
2332 EXPECT_EQ(ANeuralNetworksEvent_getSyncFenceFd(nullptr, &syncFd),
2333 ANEURALNETWORKS_UNEXPECTED_NULL);
2334 EXPECT_EQ(syncFd, -1);
2335 }
2336
TEST_F(ValidationTestExecution,EventGetSyncFenceFdFromStartCompute)2337 TEST_F(ValidationTestExecution, EventGetSyncFenceFdFromStartCompute) {
2338 // Create a valid execution and event first.
2339 ANeuralNetworksExecution* execution;
2340 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
2341 float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f}, output0[2];
2342 int32_t input2[] = {0};
2343 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, input0, sizeof(input0)),
2344 ANEURALNETWORKS_NO_ERROR);
2345 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, input1, sizeof(input1)),
2346 ANEURALNETWORKS_NO_ERROR);
2347 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, input2, sizeof(input2)),
2348 ANEURALNETWORKS_NO_ERROR);
2349 EXPECT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, output0, sizeof(output0)),
2350 ANEURALNETWORKS_NO_ERROR);
2351 ANeuralNetworksEvent* event = nullptr;
2352 EXPECT_EQ(ANeuralNetworksExecution_startCompute(execution, &event), ANEURALNETWORKS_NO_ERROR);
2353
2354     // The event from startCompute is not backed by a sync fence.
2355 int syncFd = -100;
2356 EXPECT_EQ(ANeuralNetworksEvent_getSyncFenceFd(event, &syncFd), ANEURALNETWORKS_BAD_DATA);
2357 EXPECT_EQ(syncFd, -1);
2358
2359 ANeuralNetworksEvent_free(event);
2360 ANeuralNetworksExecution_free(execution);
2361 }
2362
TEST_F(ValidationTestExecution,FencedExecution)2363 TEST_F(ValidationTestExecution, FencedExecution) {
2364 // Create a valid execution and event first.
2365 ANeuralNetworksExecution* execution1;
2366 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution1), ANEURALNETWORKS_NO_ERROR);
2367 float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f}, output0[2];
2368 int32_t input2[] = {0};
2369 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution1, 0, nullptr, input0, sizeof(input0)),
2370 ANEURALNETWORKS_NO_ERROR);
2371 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution1, 1, nullptr, input1, sizeof(input1)),
2372 ANEURALNETWORKS_NO_ERROR);
2373 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution1, 2, nullptr, input2, sizeof(input2)),
2374 ANEURALNETWORKS_NO_ERROR);
2375 EXPECT_EQ(ANeuralNetworksExecution_setOutput(execution1, 0, nullptr, output0, sizeof(output0)),
2376 ANEURALNETWORKS_NO_ERROR);
2377 ANeuralNetworksEvent* event1 = nullptr;
2378 EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution1, nullptr, 0, 0,
2379 &event1),
2380 ANEURALNETWORKS_NO_ERROR);
2381
2382 EXPECT_EQ(ANeuralNetworksEvent_getSyncFenceFd(event1, nullptr),
2383 ANEURALNETWORKS_UNEXPECTED_NULL);
2384
2385     // The event from startComputeWithDependencies may or may not be backed by a sync fence,
2386     // depending on the driver implementation.
2387 int syncFd = -100;
2388 int getSyncFdResult = ANeuralNetworksEvent_getSyncFenceFd(event1, &syncFd);
2389 if (getSyncFdResult == ANEURALNETWORKS_NO_ERROR) {
2390 EXPECT_GE(syncFd, 0);
2391 close(syncFd);
2392 } else {
2393 EXPECT_EQ(getSyncFdResult, ANEURALNETWORKS_BAD_DATA);
2394 EXPECT_EQ(syncFd, -1);
2395 }
2396
2397 // The subsequent execution will wait for the first execution to finish.
2398 ANeuralNetworksExecution* execution2;
2399 ANeuralNetworksEvent* event2 = nullptr;
2400 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution2), ANEURALNETWORKS_NO_ERROR);
2401 EXPECT_EQ(
2402 ANeuralNetworksExecution_startComputeWithDependencies(nullptr, &event1, 1, 0, &event2),
2403 ANEURALNETWORKS_UNEXPECTED_NULL);
2404 EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution2, nullptr, 1, 0,
2405 &event2),
2406 ANEURALNETWORKS_UNEXPECTED_NULL);
2407 EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution2, &event1, 1, 0,
2408 nullptr),
2409 ANEURALNETWORKS_UNEXPECTED_NULL);
2410 ANeuralNetworksEvent* wait_for_list[] = {event1, nullptr};
2411 EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution2, wait_for_list, 2, 0,
2412 &event2),
2413 ANEURALNETWORKS_UNEXPECTED_NULL);
2414
2415 ANeuralNetworksEvent_free(event1);
2416 ANeuralNetworksExecution_free(execution1);
2417 ANeuralNetworksExecution_free(execution2);
2418 }
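
// When the driver does return a sync fence, the fd can be wrapped back into an event and used as
// a dependency of a later execution. A minimal sketch with hypothetical names: fenceFd is a valid
// sync fence fd, and laterExecution already has its inputs and outputs set. The created event
// dups the fd, so the caller still owns (and must close) the original.
//
//     ANeuralNetworksEvent* fenceEvent = nullptr;
//     EXPECT_EQ(ANeuralNetworksEvent_createFromSyncFenceFd(fenceFd, &fenceEvent),
//               ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksEvent* laterEvent = nullptr;
//     EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(laterExecution, &fenceEvent,
//                                                                     1, 0, &laterEvent),
//               ANEURALNETWORKS_NO_ERROR);
//     EXPECT_EQ(ANeuralNetworksEvent_wait(laterEvent), ANEURALNETWORKS_NO_ERROR);
//     ANeuralNetworksEvent_free(laterEvent);
//     ANeuralNetworksEvent_free(fenceEvent);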
2419
TEST_F(ValidationTestExecution,GetOutputOperandRankAndDimensions)2420 TEST_F(ValidationTestExecution, GetOutputOperandRankAndDimensions) {
2421 ANeuralNetworksExecution* execution;
2422 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
2423
2424 float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f}, output0[2];
2425 int32_t input2[] = {0};
2426 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, input0, sizeof(input0)),
2427 ANEURALNETWORKS_NO_ERROR);
2428 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, input1, sizeof(input1)),
2429 ANEURALNETWORKS_NO_ERROR);
2430 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, input2, sizeof(input2)),
2431 ANEURALNETWORKS_NO_ERROR);
2432 EXPECT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, output0, sizeof(output0)),
2433 ANEURALNETWORKS_NO_ERROR);
2434
2435 uint32_t rank, dims[4], expectedRank = 1, expectedDims = 2;
2436 // This should fail, because the execution has not yet started to compute.
2437 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(execution, 0, &rank),
2438 ANEURALNETWORKS_BAD_STATE);
2439 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(execution, 0, dims),
2440 ANEURALNETWORKS_BAD_STATE);
2441
2442 ANeuralNetworksEvent* event;
2443 EXPECT_EQ(ANeuralNetworksExecution_startCompute(execution, &event), ANEURALNETWORKS_NO_ERROR);
2444 EXPECT_EQ(ANeuralNetworksEvent_wait(event), ANEURALNETWORKS_NO_ERROR);
2445
2446     // These should fail, because of unexpected nullptrs.
2447 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(nullptr, 0, &rank),
2448 ANEURALNETWORKS_UNEXPECTED_NULL);
2449 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(nullptr, 0, dims),
2450 ANEURALNETWORKS_UNEXPECTED_NULL);
2451 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(execution, 0, nullptr),
2452 ANEURALNETWORKS_UNEXPECTED_NULL);
2453 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(execution, 0, nullptr),
2454 ANEURALNETWORKS_UNEXPECTED_NULL);
2455
2456 // This should fail, because the operand does not exist.
2457 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(execution, -1, &rank),
2458 ANEURALNETWORKS_BAD_DATA);
2459 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(execution, 999, &rank),
2460 ANEURALNETWORKS_BAD_DATA);
2461 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(execution, -1, dims),
2462 ANEURALNETWORKS_BAD_DATA);
2463 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(execution, 999, dims),
2464 ANEURALNETWORKS_BAD_DATA);
2465
2466 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandRank(execution, 0, &rank),
2467 ANEURALNETWORKS_NO_ERROR);
2468 EXPECT_EQ(ANeuralNetworksExecution_getOutputOperandDimensions(execution, 0, dims),
2469 ANEURALNETWORKS_NO_ERROR);
2470 EXPECT_EQ(rank, expectedRank);
2471 EXPECT_EQ(dims[0], expectedDims);
2472
2473     // Free the event and the execution.
2474 ANeuralNetworksEvent_free(event);
2475 ANeuralNetworksExecution_free(execution);
2476 }
2477
2478 // Regression test for b/146044137.
2479 class ValidationTestDimensionProductOverflow : public ValidationTestExecution {
2480 protected:
createModel()2481 void createModel() override {
2482 uint32_t dimensions[] = {5, 4, 4, 0, 5, 3, 0, 4, 5};
2483 ANeuralNetworksOperandType operandType = {
2484 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2485 .dimensionCount = std::size(dimensions),
2486 .dimensions = dimensions,
2487 };
2488 addOperand(operandType);
2489 addOperand(operandType);
2490 ASSERT_EQ(addOperation(ANEURALNETWORKS_ABS, {0}, {1}), ANEURALNETWORKS_NO_ERROR);
2491 ASSERT_EQ(identifyInputsAndOutputs({0}, {1}), ANEURALNETWORKS_NO_ERROR);
2492 ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
2493 }
2494 };
2495
TEST_F(ValidationTestDimensionProductOverflow,SetInputOrOutput)2496 TEST_F(ValidationTestDimensionProductOverflow, SetInputOrOutput) {
2497 uint32_t dimensions[] = {5, 4, 4, 786433, 5, 3, 16777216, 4, 5};
2498 ANeuralNetworksOperandType operandType = {
2499 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2500 .dimensionCount = std::size(dimensions),
2501 .dimensions = dimensions,
2502 };
2503 uint8_t buffer[20];
2504 // This should fail, as the new operand type's dimension product overflows
2505 // uint32_t.
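    // (786433 * 16777216 alone is roughly 1.3e13, far above UINT32_MAX of roughly 4.29e9.)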
2506 EXPECT_EQ(
2507 ANeuralNetworksExecution_setInput(mExecution, 0, &operandType, buffer, sizeof(buffer)),
2508 ANEURALNETWORKS_BAD_DATA);
2509 EXPECT_EQ(
2510 ANeuralNetworksExecution_setOutput(mExecution, 0, &operandType, buffer, sizeof(buffer)),
2511 ANEURALNETWORKS_BAD_DATA);
2512 }
2513
TEST_F(ValidationTestModel,AddOperandDimensionProductOverflow)2514 TEST_F(ValidationTestModel, AddOperandDimensionProductOverflow) {
2515 uint32_t dimensions[] = {5, 4, 4, 786433, 5, 3, 16777216, 4, 5};
2516 ANeuralNetworksOperandType operandType = {
2517 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2518 .dimensionCount = std::size(dimensions),
2519 .dimensions = dimensions,
2520 };
2521 // This should fail, as the operand type's dimension product overflows uint32_t.
2522 ASSERT_EQ(ANeuralNetworksModel_addOperand(mModel, &operandType), ANEURALNETWORKS_BAD_DATA);
2523 }
2524
2525 class ValidationTestDimensionProductOverflow2 : public ValidationTestExecution {
2526 protected:
createModel()2527 void createModel() override {
2528 addTensorOperand(ANEURALNETWORKS_TENSOR_FLOAT32, {0, 1});
2529 addTensorOperand(ANEURALNETWORKS_TENSOR_FLOAT32, {0, 1});
2530 addTensorOperand(ANEURALNETWORKS_TENSOR_FLOAT32, {0});
2531 addScalarOperand(ANEURALNETWORKS_INT32);
2532 int32_t activation = 0;
2533 ASSERT_EQ(ANeuralNetworksModel_setOperandValue(mModel, 3, &activation, sizeof(activation)),
2534 ANEURALNETWORKS_NO_ERROR);
2535 addTensorOperand(ANEURALNETWORKS_TENSOR_FLOAT32, {0, 0});
2536 ASSERT_EQ(addOperation(ANEURALNETWORKS_FULLY_CONNECTED, {0, 1, 2, 3}, {4}),
2537 ANEURALNETWORKS_NO_ERROR);
2538 ASSERT_EQ(identifyInputsAndOutputs({0, 1, 2}, {4}), ANEURALNETWORKS_NO_ERROR);
2539 ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
2540 }
2541 };
2542
TEST_F(ValidationTestDimensionProductOverflow2,DynamicOutputShapeOverflow)2543 TEST_F(ValidationTestDimensionProductOverflow2, DynamicOutputShapeOverflow) {
2544 constexpr uint32_t kLargeDim = 1 << 16;
2545 std::vector<float> inputData(kLargeDim), outputData(kLargeDim);
2546 const uint32_t inputDims[] = {kLargeDim, 1};
2547 const uint32_t biasDims[] = {kLargeDim};
2548 const ANeuralNetworksOperandType inputType = {
2549 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2550 .dimensionCount = std::size(inputDims),
2551 .dimensions = inputDims,
2552 };
2553 const ANeuralNetworksOperandType biasType = {
2554 .type = ANEURALNETWORKS_TENSOR_FLOAT32,
2555 .dimensionCount = std::size(biasDims),
2556 .dimensions = biasDims,
2557 };
2558 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, &inputType, inputData.data(),
2559 inputData.size() * sizeof(float)),
2560 ANEURALNETWORKS_NO_ERROR);
2561 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 1, &inputType, inputData.data(),
2562 inputData.size() * sizeof(float)),
2563 ANEURALNETWORKS_NO_ERROR);
2564 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 2, &biasType, inputData.data(),
2565 inputData.size() * sizeof(float)),
2566 ANEURALNETWORKS_NO_ERROR);
2567 EXPECT_EQ(ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, outputData.data(),
2568 outputData.size() * sizeof(float)),
2569 ANEURALNETWORKS_NO_ERROR);
2570
2571 // This should fail, because the deduced output data size overflows uint32_t.
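    // (FULLY_CONNECTED deduces a {kLargeDim, kLargeDim} output here: 2^16 * 2^16 = 2^32 elements,
    // which does not fit in uint32_t.)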
2572 EXPECT_NE(ANeuralNetworksExecution_compute(mExecution), ANEURALNETWORKS_NO_ERROR);
2573 }
2574
TEST_F(ValidationTestBurst,BurstComputeNull)2575 TEST_F(ValidationTestBurst, BurstComputeNull) {
2576 EXPECT_EQ(ANeuralNetworksExecution_burstCompute(mExecution, nullptr),
2577 ANEURALNETWORKS_UNEXPECTED_NULL);
2578 EXPECT_EQ(ANeuralNetworksExecution_burstCompute(nullptr, mBurst),
2579 ANEURALNETWORKS_UNEXPECTED_NULL);
2580 }
2581
TEST_F(ValidationTestBurst,BurstComputeBadCompilation)2582 TEST_F(ValidationTestBurst, BurstComputeBadCompilation) {
2583 ANeuralNetworksCompilation* compilation;
2584 ASSERT_EQ(ANeuralNetworksCompilation_create(mModel, &compilation), ANEURALNETWORKS_NO_ERROR);
2585 // NOTE: ANeuralNetworksCompilation_finish not called
2586
2587     ANeuralNetworksBurst* burst = nullptr;
2588 EXPECT_EQ(ANeuralNetworksBurst_create(compilation, &burst), ANEURALNETWORKS_BAD_STATE);
2589
2590     // Free the burst and the compilation.
2591 ANeuralNetworksBurst_free(burst);
2592 ANeuralNetworksCompilation_free(compilation);
2593 }
2594
TEST_F(ValidationTestBurst,BurstComputeDifferentCompilations)2595 TEST_F(ValidationTestBurst, BurstComputeDifferentCompilations) {
2596 ANeuralNetworksCompilation* secondCompilation;
2597 ASSERT_EQ(ANeuralNetworksCompilation_create(mModel, &secondCompilation),
2598 ANEURALNETWORKS_NO_ERROR);
2599 ASSERT_EQ(ANeuralNetworksCompilation_finish(secondCompilation), ANEURALNETWORKS_NO_ERROR);
2600
2601 ANeuralNetworksExecution* execution;
2602 EXPECT_EQ(ANeuralNetworksExecution_create(secondCompilation, &execution),
2603 ANEURALNETWORKS_NO_ERROR);
2604
2605 EXPECT_EQ(ANeuralNetworksExecution_burstCompute(execution, mBurst), ANEURALNETWORKS_BAD_DATA);
2606
2607 ANeuralNetworksExecution_free(execution);
2608 ANeuralNetworksCompilation_free(secondCompilation);
2609 }
2610
TEST_F(ValidationTestBurst,BurstComputeConcurrent)2611 TEST_F(ValidationTestBurst, BurstComputeConcurrent) {
2612 ANeuralNetworksExecution* secondExecution;
2613 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &secondExecution),
2614 ANEURALNETWORKS_NO_ERROR);
2615
2616 // set inputs of first execution
2617 float inputA0[] = {1.0f, 1.0f}, inputA1[] = {2.0f, 2.0f}, outputA0[2];
2618 int32_t inputA2[] = {0};
2619 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 0, nullptr, inputA0, sizeof(inputA0)),
2620 ANEURALNETWORKS_NO_ERROR);
2621 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 1, nullptr, inputA1, sizeof(inputA1)),
2622 ANEURALNETWORKS_NO_ERROR);
2623 EXPECT_EQ(ANeuralNetworksExecution_setInput(mExecution, 2, nullptr, inputA2, sizeof(inputA2)),
2624 ANEURALNETWORKS_NO_ERROR);
2625 EXPECT_EQ(
2626 ANeuralNetworksExecution_setOutput(mExecution, 0, nullptr, outputA0, sizeof(outputA0)),
2627 ANEURALNETWORKS_NO_ERROR);
2628
2629 // set inputs of second execution
2630 float inputB0[] = {1.0f, 1.0f}, inputB1[] = {2.0f, 2.0f}, outputB0[2];
2631 int32_t inputB2[] = {0};
2632 EXPECT_EQ(ANeuralNetworksExecution_setInput(secondExecution, 0, nullptr, inputB0,
2633 sizeof(inputB0)),
2634 ANEURALNETWORKS_NO_ERROR);
2635 EXPECT_EQ(ANeuralNetworksExecution_setInput(secondExecution, 1, nullptr, inputB1,
2636 sizeof(inputB1)),
2637 ANEURALNETWORKS_NO_ERROR);
2638 EXPECT_EQ(ANeuralNetworksExecution_setInput(secondExecution, 2, nullptr, inputB2,
2639 sizeof(inputB2)),
2640 ANEURALNETWORKS_NO_ERROR);
2641 EXPECT_EQ(ANeuralNetworksExecution_setOutput(secondExecution, 0, nullptr, outputB0,
2642 sizeof(outputB0)),
2643 ANEURALNETWORKS_NO_ERROR);
2644
2645 // Execute on the same burst concurrently. At least one result must be
2646 // ANEURALNETWORKS_NO_ERROR. One may return ANEURALNETWORKS_BAD_STATE if the
2647 // other is already executing on the burst.
2648 auto first = std::async(std::launch::async, [this] {
2649 return ANeuralNetworksExecution_burstCompute(mExecution, mBurst);
2650 });
2651 auto second = std::async(std::launch::async, [this, secondExecution] {
2652 return ANeuralNetworksExecution_burstCompute(secondExecution, mBurst);
2653 });
2654
2655 const int result1 = first.get();
2656 const int result2 = second.get();
2657 EXPECT_TRUE(result1 == ANEURALNETWORKS_BAD_STATE || result1 == ANEURALNETWORKS_NO_ERROR);
2658 EXPECT_TRUE(result2 == ANEURALNETWORKS_BAD_STATE || result2 == ANEURALNETWORKS_NO_ERROR);
2659 EXPECT_TRUE(result1 == ANEURALNETWORKS_NO_ERROR || result2 == ANEURALNETWORKS_NO_ERROR);
2660
2661 ANeuralNetworksExecution_free(secondExecution);
2662 }
2663
2664 // The burst object maintains a local cache of memory objects. Because the burst
2665 // is intended to live for multiple executions, and because memory might be
2666 // created and freed for each execution, the burst includes internal mechanisms to
2667 // purge cached memory objects that have been freed by the NNAPI client.
2668 // The following two test cases (FreeMemoryBeforeBurst and
2669 // FreeBurstBeforeMemory) ensure that this internal cleanup is tested in both
2670 // freeing orders.
2671 //
2672 // These two test cases explicitly create a new burst object and a new execution
2673 // object so that the order of freeing can be specified. If these tests instead
2674 // relied on the provided mExecution and mBurst, mBurst would always be freed
2675 // before mExecution.
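
// A minimal sketch (a hypothetical helper, not exercised as a test) of the lifecycle described
// above: one burst object is reused across two executions, and the client frees an
// ANeuralNetworksMemory object in between, which requires the burst to purge the stale entry
// from its cache. The compilation is assumed to be for the same ADD model used by these tests,
// the memory objects are assumed to be large enough to hold the output, and the sketch takes
// ownership of firstOutput; error checking is omitted for brevity.
[[maybe_unused]] static void burstMemoryCachePurgeSketch(ANeuralNetworksCompilation* compilation,
                                                         ANeuralNetworksMemory* firstOutput,
                                                         ANeuralNetworksMemory* secondOutput,
                                                         size_t outputSize) {
    ANeuralNetworksBurst* burst = nullptr;
    ANeuralNetworksBurst_create(compilation, &burst);

    float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f};
    int32_t input2[] = {0};

    const auto runOnce = [&](ANeuralNetworksMemory* output) {
        ANeuralNetworksExecution* execution = nullptr;
        ANeuralNetworksExecution_create(compilation, &execution);
        ANeuralNetworksExecution_setInput(execution, 0, nullptr, input0, sizeof(input0));
        ANeuralNetworksExecution_setInput(execution, 1, nullptr, input1, sizeof(input1));
        ANeuralNetworksExecution_setInput(execution, 2, nullptr, input2, sizeof(input2));
        ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, output, 0, outputSize);
        // The burst caches "output" internally as part of this computation.
        ANeuralNetworksExecution_burstCompute(execution, burst);
        ANeuralNetworksExecution_free(execution);
    };

    runOnce(firstOutput);
    // Freeing the memory while the burst is still alive forces the burst to purge the
    // corresponding entry from its cache before the next computation.
    ANeuralNetworksMemory_free(firstOutput);
    runOnce(secondOutput);

    ANeuralNetworksBurst_free(burst);
}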
2676
TEST_F(ValidationTestBurst,FreeMemoryBeforeBurst)2677 TEST_F(ValidationTestBurst, FreeMemoryBeforeBurst) {
2678 ANeuralNetworksBurst* burst;
2679 EXPECT_EQ(ANeuralNetworksBurst_create(mCompilation, &burst), ANEURALNETWORKS_NO_ERROR);
2680
2681 // prepare data for execution
2682 float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f}, output0[2];
2683 int32_t input2[] = {0};
2684
2685 const size_t memorySize = sizeof(output0);
2686 #ifdef __ANDROID__
2687 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
2688 #else // __ANDROID__
2689 TemporaryFile tmpFile;
2690 int memoryFd = tmpFile.release();
2691 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
2692 #endif // __ANDROID__
2693 ASSERT_GT(memoryFd, 0);
2694
2695 ANeuralNetworksMemory* memory;
2696 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
2697 &memory),
2698 ANEURALNETWORKS_NO_ERROR);
2699
2700 // create and configure execution
2701 ANeuralNetworksExecution* execution;
2702 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
2703 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, input0, sizeof(input0)),
2704 ANEURALNETWORKS_NO_ERROR);
2705 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, input1, sizeof(input1)),
2706 ANEURALNETWORKS_NO_ERROR);
2707 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, input2, sizeof(input2)),
2708 ANEURALNETWORKS_NO_ERROR);
2709 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0,
2710 sizeof(output0)),
2711 ANEURALNETWORKS_NO_ERROR);
2712
2713 // perform an execution to cache the memory object in the burst
2714 EXPECT_EQ(ANeuralNetworksExecution_burstCompute(execution, burst), ANEURALNETWORKS_NO_ERROR);
2715 ANeuralNetworksExecution_free(execution);
2716
2717 // free memory before burst
2718 ANeuralNetworksMemory_free(memory);
2719 ANeuralNetworksBurst_free(burst);
2720
2721 // close memory
2722 close(memoryFd);
2723 }
2724
TEST_F(ValidationTestBurst,FreeBurstBeforeMemory)2725 TEST_F(ValidationTestBurst, FreeBurstBeforeMemory) {
2726 ANeuralNetworksBurst* burst;
2727 EXPECT_EQ(ANeuralNetworksBurst_create(mCompilation, &burst), ANEURALNETWORKS_NO_ERROR);
2728
2729 // prepare data for execution
2730 float input0[] = {1.0f, 1.0f}, input1[] = {2.0f, 2.0f}, output0[2];
2731 int32_t input2[] = {0};
2732 const size_t memorySize = sizeof(output0);
2733 #ifdef __ANDROID__
2734 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
2735 #else // __ANDROID__
2736 TemporaryFile tmpFile;
2737 int memoryFd = tmpFile.release();
2738 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
2739 #endif // __ANDROID__
2740 ASSERT_GT(memoryFd, 0);
2741
2742 ANeuralNetworksMemory* memory;
2743 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
2744 &memory),
2745 ANEURALNETWORKS_NO_ERROR);
2746
2747 // create and configure execution
2748 ANeuralNetworksExecution* execution;
2749 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
2750 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, input0, sizeof(input0)),
2751 ANEURALNETWORKS_NO_ERROR);
2752 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, input1, sizeof(input1)),
2753 ANEURALNETWORKS_NO_ERROR);
2754 EXPECT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, input2, sizeof(input2)),
2755 ANEURALNETWORKS_NO_ERROR);
2756 EXPECT_EQ(ANeuralNetworksExecution_setOutputFromMemory(execution, 0, nullptr, memory, 0,
2757 sizeof(output0)),
2758 ANEURALNETWORKS_NO_ERROR);
2759
2760 // perform an execution to cache the memory object in the burst
2761 EXPECT_EQ(ANeuralNetworksExecution_burstCompute(execution, burst), ANEURALNETWORKS_NO_ERROR);
2762 ANeuralNetworksExecution_free(execution);
2763
2764 // free burst before memory
2765 ANeuralNetworksBurst_free(burst);
2766 ANeuralNetworksMemory_free(memory);
2767
2768 // close memory
2769 close(memoryFd);
2770 }
2771
TEST(ValidationTestIntrospection,GetNumDevices)2772 TEST(ValidationTestIntrospection, GetNumDevices) {
2773 uint32_t numDevices = 0;
2774 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2775 EXPECT_EQ(ANeuralNetworks_getDeviceCount(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2776 }
2777
TEST(ValidationTestIntrospection,GetDevice)2778 TEST(ValidationTestIntrospection, GetDevice) {
2779 uint32_t numDevices = 0;
2780 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2781
2782 ANeuralNetworksDevice* device = nullptr;
2783 for (uint32_t i = 0; i < numDevices; i++) {
2784 SCOPED_TRACE(i);
2785 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2786 EXPECT_NE(device, nullptr);
2787 }
2788 EXPECT_EQ(ANeuralNetworks_getDevice(0, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2789 EXPECT_EQ(ANeuralNetworks_getDevice(numDevices, &device), ANEURALNETWORKS_BAD_DATA);
2790 }
2791
deviceStringCheck(std::function<int (const ANeuralNetworksDevice *,const char **)> func)2792 static void deviceStringCheck(std::function<int(const ANeuralNetworksDevice*, const char**)> func) {
2793 uint32_t numDevices = 0;
2794 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2795
2796 const char* buffer;
2797 for (uint32_t i = 0; i < numDevices; i++) {
2798 SCOPED_TRACE(i);
2799 ANeuralNetworksDevice* device;
2800 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2801 EXPECT_EQ(func(device, &buffer), ANEURALNETWORKS_NO_ERROR);
2802 EXPECT_EQ(func(device, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2803 }
2804 EXPECT_EQ(func(nullptr, &buffer), ANEURALNETWORKS_UNEXPECTED_NULL);
2805 EXPECT_EQ(func(nullptr, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2806 }
2807
TEST(ValidationTestIntrospection,DeviceGetName)2808 TEST(ValidationTestIntrospection, DeviceGetName) {
2809 deviceStringCheck(ANeuralNetworksDevice_getName);
2810 }
2811
TEST(ValidationTestIntrospection,DeviceGetNameUnique)2812 TEST(ValidationTestIntrospection, DeviceGetNameUnique) {
2813 uint32_t numDevices = 0;
2814 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2815
2816 std::set<std::string> deviceNames;
2817 for (uint32_t i = 0; i < numDevices; i++) {
2818 ANeuralNetworksDevice* device = nullptr;
2819 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2820 const char* buffer = nullptr;
2821 EXPECT_EQ(ANeuralNetworksDevice_getName(device, &buffer), ANEURALNETWORKS_NO_ERROR);
2822 std::string name(buffer);
2823 EXPECT_EQ(deviceNames.count(name), (uint32_t)0);
2824 deviceNames.insert(name);
2825 }
2826 }
2827
TEST(ValidationTestIntrospection,DeviceGetVersion)2828 TEST(ValidationTestIntrospection, DeviceGetVersion) {
2829 deviceStringCheck(ANeuralNetworksDevice_getVersion);
2830 }
2831
TEST(ValidationTestIntrospection,DeviceGetFeatureLevel)2832 TEST(ValidationTestIntrospection, DeviceGetFeatureLevel) {
2833 uint32_t numDevices = 0;
2834 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2835
2836 int64_t featureLevel;
2837 for (uint32_t i = 0; i < numDevices; i++) {
2838 SCOPED_TRACE(i);
2839 ANeuralNetworksDevice* device;
2840 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2841 EXPECT_EQ(ANeuralNetworksDevice_getFeatureLevel(device, &featureLevel),
2842 ANEURALNETWORKS_NO_ERROR);
2843 EXPECT_EQ(ANeuralNetworksDevice_getFeatureLevel(device, nullptr),
2844 ANEURALNETWORKS_UNEXPECTED_NULL);
2845 }
2846 EXPECT_EQ(ANeuralNetworksDevice_getFeatureLevel(nullptr, &featureLevel),
2847 ANEURALNETWORKS_UNEXPECTED_NULL);
2848 EXPECT_EQ(ANeuralNetworksDevice_getFeatureLevel(nullptr, nullptr),
2849 ANEURALNETWORKS_UNEXPECTED_NULL);
2850 }
2851
TEST(ValidationTestIntrospection,DeviceGetType)2852 TEST(ValidationTestIntrospection, DeviceGetType) {
2853 uint32_t numDevices = 0;
2854 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2855
2856 int32_t validTypes[] = {ANEURALNETWORKS_DEVICE_UNKNOWN, ANEURALNETWORKS_DEVICE_OTHER,
2857 ANEURALNETWORKS_DEVICE_CPU, ANEURALNETWORKS_DEVICE_GPU,
2858 ANEURALNETWORKS_DEVICE_ACCELERATOR};
2859 int32_t deviceType;
2860 for (uint32_t i = 0; i < numDevices; i++) {
2861 SCOPED_TRACE(i);
2862 // Initialize the deviceType to be an invalid type.
2863 deviceType = -1;
2864 ANeuralNetworksDevice* device;
2865 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2866 EXPECT_EQ(ANeuralNetworksDevice_getType(device, &deviceType), ANEURALNETWORKS_NO_ERROR);
2867 EXPECT_TRUE(std::find(std::begin(validTypes), std::end(validTypes), deviceType) !=
2868 std::end(validTypes));
2869 EXPECT_EQ(ANeuralNetworksDevice_getType(device, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2870 }
2871 EXPECT_EQ(ANeuralNetworksDevice_getType(nullptr, &deviceType), ANEURALNETWORKS_UNEXPECTED_NULL);
2872 EXPECT_EQ(ANeuralNetworksDevice_getType(nullptr, nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2873 }
2874
TEST(ValidationTestIntrospection,DeviceWait)2875 TEST(ValidationTestIntrospection, DeviceWait) {
2876 uint32_t numDevices = 0;
2877 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2878
2879 for (uint32_t i = 0; i < numDevices; i++) {
2880 SCOPED_TRACE(i);
2881 ANeuralNetworksDevice* device;
2882 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
2883 EXPECT_EQ(ANeuralNetworksDevice_wait(device), ANEURALNETWORKS_NO_ERROR);
2884 }
2885 EXPECT_EQ(ANeuralNetworksDevice_wait(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2886 }
2887
2888 class ValidationTestCompilationForDevices_1 : public ValidationTestModel {
2889 protected:
SetUp()2890 virtual void SetUp() override {
2891 ValidationTestModel::SetUp();
2892 createModel();
2893
2894 uint32_t numDevices = 0;
2895 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
2896
2897 if (numDevices > 0) {
2898 EXPECT_EQ(ANeuralNetworks_getDevice(0, &mDevice), ANEURALNETWORKS_NO_ERROR);
2899 bool supported = false;
2900 ASSERT_EQ(mNumOperations, static_cast<uint32_t>(1));
2901 EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, &mDevice, 1,
2902 &supported),
2903 ANEURALNETWORKS_NO_ERROR);
2904 if (supported) {
2905 ASSERT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, &mDevice, 1,
2906 &mCompilation),
2907 ANEURALNETWORKS_NO_ERROR);
2908 }
2909 }
2910 }
2911
TearDown()2912 virtual void TearDown() {
2913 ANeuralNetworksCompilation_free(mCompilation);
2914 ValidationTestModel::TearDown();
2915 }
2916
2917 ANeuralNetworksDevice* mDevice = nullptr;
2918 ANeuralNetworksCompilation* mCompilation = nullptr;
2919 };
2920
2921 // Also see TEST_F(ValidationTestCompilation, SetPreference)
TEST_F(ValidationTestCompilationForDevices_1,SetPreference)2922 TEST_F(ValidationTestCompilationForDevices_1, SetPreference) {
2923 EXPECT_EQ(ANeuralNetworksCompilation_setPreference(nullptr, ANEURALNETWORKS_PREFER_LOW_POWER),
2924 ANEURALNETWORKS_UNEXPECTED_NULL);
2925 if (!mCompilation) {
2926 return;
2927 }
2928 EXPECT_EQ(ANeuralNetworksCompilation_setPreference(mCompilation, 40), ANEURALNETWORKS_BAD_DATA);
2929 }
2930
2931 // Also see TEST_F(ValidationTestCompilation, SetCaching)
TEST_F(ValidationTestCompilationForDevices_1,SetCaching)2932 TEST_F(ValidationTestCompilationForDevices_1, SetCaching) {
2933 std::vector<uint8_t> token(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN, 0);
2934 EXPECT_EQ(ANeuralNetworksCompilation_setCaching(nullptr, NN_TMP_DIR, token.data()),
2935 ANEURALNETWORKS_UNEXPECTED_NULL);
2936 if (!mCompilation) {
2937 return;
2938 }
2939 EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, nullptr, token.data()),
2940 ANEURALNETWORKS_UNEXPECTED_NULL);
2941 EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, NN_TMP_DIR, nullptr),
2942 ANEURALNETWORKS_UNEXPECTED_NULL);
2943 }
2944
2945 // Also see TEST_F(ValidationTestCompilation, CreateExecution)
TEST_F(ValidationTestCompilationForDevices_1,CreateExecution)2946 TEST_F(ValidationTestCompilationForDevices_1, CreateExecution) {
2947 ANeuralNetworksExecution* execution = nullptr;
2948 EXPECT_EQ(ANeuralNetworksExecution_create(nullptr, &execution),
2949 ANEURALNETWORKS_UNEXPECTED_NULL);
2950 if (!mCompilation) {
2951 return;
2952 }
2953 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, nullptr),
2954 ANEURALNETWORKS_UNEXPECTED_NULL);
2955 EXPECT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_BAD_STATE);
2956 }
2957
2958 // Also see TEST_F(ValidationTestCompilation, Finish)
TEST_F(ValidationTestCompilationForDevices_1,Finish)2959 TEST_F(ValidationTestCompilationForDevices_1, Finish) {
2960 EXPECT_EQ(ANeuralNetworksCompilation_finish(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
2961 if (!mCompilation) {
2962 return;
2963 }
2964 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
2965 EXPECT_EQ(ANeuralNetworksCompilation_setPreference(mCompilation,
2966 ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER),
2967 ANEURALNETWORKS_BAD_STATE);
2968 EXPECT_EQ(
2969 ANeuralNetworksCompilation_setPriority(mCompilation, ANEURALNETWORKS_PRIORITY_DEFAULT),
2970 ANEURALNETWORKS_BAD_STATE);
2971 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, kShortWaitInNanoseconds),
2972 ANEURALNETWORKS_BAD_STATE);
2973 std::vector<uint8_t> token(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN, 0);
2974 EXPECT_EQ(ANeuralNetworksCompilation_setCaching(mCompilation, NN_TMP_DIR, token.data()),
2975 ANEURALNETWORKS_BAD_STATE);
2976 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_BAD_STATE);
2977 }
2978
2979 // Also see TEST_F(ValidationTestCompilation, SetTimeout)
2980 // Also see TEST_F(ValidationTestCompilationForDevices_2, SetTimeout)
TEST_F(ValidationTestCompilationForDevices_1,SetTimeout)2981 TEST_F(ValidationTestCompilationForDevices_1, SetTimeout) {
2982 if (!mCompilation) {
2983 return;
2984 }
2985
2986 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, kShortWaitInNanoseconds),
2987 ANEURALNETWORKS_NO_ERROR);
2988
2989 // Attempt to finish
2990 const int n = ANeuralNetworksCompilation_finish(mCompilation);
2991 EXPECT_TRUE(n == ANEURALNETWORKS_NO_ERROR || n == ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT ||
2992 n == ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT);
2993 }
2994
TEST_F(ValidationTestCompilationForDevices_1,SetTimeoutMaximum)2995 TEST_F(ValidationTestCompilationForDevices_1, SetTimeoutMaximum) {
2996 if (!mCompilation) {
2997 return;
2998 }
2999
3000 constexpr uint64_t duration = std::numeric_limits<uint64_t>::max();
3001 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, duration),
3002 ANEURALNETWORKS_NO_ERROR);
3003 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3004 }
3005
3006 class ValidationTestCompilationForDevices_2 : public ValidationTestModel {
3007 protected:
SetUp()3008 virtual void SetUp() override {
3009 ValidationTestModel::SetUp();
3010 createModel();
3011
3012 uint32_t numDevices = 0;
3013 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
3014
3015 if (numDevices > 1) {
3016 EXPECT_EQ(ANeuralNetworks_getDevice(0, &mDevices[0]), ANEURALNETWORKS_NO_ERROR);
3017 EXPECT_EQ(ANeuralNetworks_getDevice(1, &mDevices[1]), ANEURALNETWORKS_NO_ERROR);
3018 bool supported = false;
3019 ASSERT_EQ(mNumOperations, static_cast<uint32_t>(1));
3020 EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, mDevices, 2,
3021 &supported),
3022 ANEURALNETWORKS_NO_ERROR);
3023 if (supported) {
3024 ASSERT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, mDevices, 2,
3025 &mCompilation),
3026 ANEURALNETWORKS_NO_ERROR);
3027 }
3028 }
3029 }
3030
TearDown()3031 virtual void TearDown() {
3032 ANeuralNetworksCompilation_free(mCompilation);
3033 ValidationTestModel::TearDown();
3034 }
3035
3036 ANeuralNetworksDevice* mDevices[2] = {nullptr, nullptr};
3037 ANeuralNetworksCompilation* mCompilation = nullptr;
3038 };
3039
3040 // Also see TEST_F(ValidationTestCompilation, SetTimeout)
3041 // Also see TEST_F(ValidationTestCompilationForDevices_1, SetTimeout)
TEST_F(ValidationTestCompilationForDevices_2,SetTimeout)3042 TEST_F(ValidationTestCompilationForDevices_2, SetTimeout) {
3043 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(nullptr, kShortWaitInNanoseconds),
3044 ANEURALNETWORKS_UNEXPECTED_NULL);
3045 if (!mCompilation) {
3046 return;
3047 }
3048 // Compilation timeouts can only be set on compilations created via
3049 // ANeuralNetworksCompilation_createForDevices with exactly one device specified.
3050 EXPECT_EQ(ANeuralNetworksCompilation_setTimeout(mCompilation, kShortWaitInNanoseconds),
3051 ANEURALNETWORKS_BAD_DATA);
3052 }
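
// A minimal sketch (a hypothetical helper, illustrative only and not used by any test) of the
// rule checked above: the same ANeuralNetworksCompilation_setTimeout call is accepted when the
// compilation targets exactly one device and is rejected with ANEURALNETWORKS_BAD_DATA
// otherwise. The model and devices are assumed to be valid and to support the model; error
// checking is omitted.
[[maybe_unused]] static int compilationSetTimeoutSketch(ANeuralNetworksModel* model,
                                                        ANeuralNetworksDevice* const* devices,
                                                        uint32_t numDevices, uint64_t timeoutNs) {
    ANeuralNetworksCompilation* compilation = nullptr;
    ANeuralNetworksCompilation_createForDevices(model, devices, numDevices, &compilation);
    // Expected: ANEURALNETWORKS_NO_ERROR when numDevices == 1, ANEURALNETWORKS_BAD_DATA otherwise.
    const int status = ANeuralNetworksCompilation_setTimeout(compilation, timeoutNs);
    ANeuralNetworksCompilation_free(compilation);
    return status;
}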
3053
3054 // Also see TEST_F(ValidationTestCompilation, ExecutionSetTimeout)
3055 // Also see TEST_F(ValidationTestCompilationForDevices_1, ExecutionSetTimeout)
TEST_F(ValidationTestCompilationForDevices_2,ExecutionSetTimeout)3056 TEST_F(ValidationTestCompilationForDevices_2, ExecutionSetTimeout) {
3057 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(nullptr, kShortWaitInNanoseconds),
3058 ANEURALNETWORKS_UNEXPECTED_NULL);
3059
3060 if (!mCompilation) {
3061 return;
3062 }
3063 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3064 ANeuralNetworksExecution* execution;
3065 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
3066 // Execution timeouts can only be set when the underlying compilation was created via
3067 // ANeuralNetworksCompilation_createForDevices with exactly one device specified.
3068 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(execution, kShortWaitInNanoseconds),
3069 ANEURALNETWORKS_BAD_DATA);
3070 ANeuralNetworksExecution_free(execution);
3071 }
3072
3073 // Also see TEST_F(ValidationTestCompilation, ExecutionTiming)
3074 // Also see TEST_F(ValidationTestCompilationForDevices_1, ExecutionTiming)
TEST_F(ValidationTestCompilationForDevices_2,ExecutionTiming)3075 TEST_F(ValidationTestCompilationForDevices_2, ExecutionTiming) {
3076 if (!mCompilation) {
3077 return;
3078 }
3079 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3080 ANeuralNetworksExecution* execution;
3081 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
3082 // Cannot setMeasureTiming() if there are two or more devices.
3083 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, false),
3084 ANEURALNETWORKS_BAD_DATA);
3085 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, true), ANEURALNETWORKS_BAD_DATA);
3086
3087 // free the execution
3088 ANeuralNetworksExecution_free(execution);
3089 }
3090
3091 class ValidationTestInvalidCompilation : public ValidationTestModel {
3092 protected:
SetUp()3093 virtual void SetUp() override {
3094 ValidationTestModel::SetUp();
3095
3096 // Create a model with an OEM operation
3097 uint32_t dimensions[]{1};
3098 ANeuralNetworksOperandType OEMTensorType{.type = ANEURALNETWORKS_TENSOR_OEM_BYTE,
3099 .dimensionCount = 1,
3100 .dimensions = dimensions};
3101 EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &OEMTensorType),
3102 ANEURALNETWORKS_NO_ERROR);
3103 EXPECT_EQ(ANeuralNetworksModel_addOperand(mModel, &OEMTensorType),
3104 ANEURALNETWORKS_NO_ERROR);
3105 ASSERT_EQ(addOperation(ANEURALNETWORKS_OEM_OPERATION, {0}, {1}), ANEURALNETWORKS_NO_ERROR);
3106 ASSERT_EQ(identifyInputsAndOutputs({0}, {1}), ANEURALNETWORKS_NO_ERROR);
3107 ASSERT_EQ(modelFinish(), ANEURALNETWORKS_NO_ERROR);
3108
3109 // Find a device that cannot handle the OEM operation and create a compilation for it
3110 uint32_t numDevices = 0;
3111 EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);
3112 for (uint32_t i = 0; i < numDevices; i++) {
3113 ANeuralNetworksDevice* device;
3114 EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
3115 bool supported = false;
3116 EXPECT_EQ(ANeuralNetworksModel_getSupportedOperationsForDevices(mModel, &device, 1,
3117 &supported),
3118 ANEURALNETWORKS_NO_ERROR);
3119 if (!supported) {
3120 ASSERT_EQ(ANeuralNetworksCompilation_createForDevices(mModel, &device, 1,
3121 &mInvalidCompilation),
3122 ANEURALNETWORKS_NO_ERROR);
3123 break;
3124 }
3125 }
3126 if (mInvalidCompilation) {
3127 ASSERT_EQ(ANeuralNetworksCompilation_finish(mInvalidCompilation),
3128 ANEURALNETWORKS_BAD_DATA);
3129 }
3130 }
3131
TearDown()3132 virtual void TearDown() {
3133 ANeuralNetworksCompilation_free(mInvalidCompilation);
3134 ValidationTestModel::TearDown();
3135 }
3136
3137 ANeuralNetworksCompilation* mInvalidCompilation = nullptr;
3138 };
3139
TEST_F(ValidationTestInvalidCompilation,GetPreferredMemoryAlignmentAndPadding)3140 TEST_F(ValidationTestInvalidCompilation, GetPreferredMemoryAlignmentAndPadding) {
3141 if (__builtin_available(android __NNAPI_FL5_MIN_ANDROID_API__, *)) {
3142 if (!mInvalidCompilation) {
3143 return;
3144 }
3145 uint32_t result;
3146 EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
3147 mInvalidCompilation, 0, &result),
3148 ANEURALNETWORKS_BAD_STATE);
3149 EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(mInvalidCompilation,
3150 0, &result),
3151 ANEURALNETWORKS_BAD_STATE);
3152 EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(
3153 mInvalidCompilation, 0, &result),
3154 ANEURALNETWORKS_BAD_STATE);
3155 EXPECT_EQ(ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(mInvalidCompilation,
3156 0, &result),
3157 ANEURALNETWORKS_BAD_STATE);
3158 } else {
3159 GTEST_SKIP();
3160 }
3161 }
3162
TEST_F(ValidationTestInvalidCompilation,CreateExecution)3163 TEST_F(ValidationTestInvalidCompilation, CreateExecution) {
3164 if (!mInvalidCompilation) {
3165 return;
3166 }
3167 ANeuralNetworksExecution* execution = nullptr;
3168 EXPECT_EQ(ANeuralNetworksExecution_create(mInvalidCompilation, &execution),
3169 ANEURALNETWORKS_BAD_STATE);
3170 ANeuralNetworksExecution_free(execution);
3171 }
3172
TEST_F(ValidationTestInvalidCompilation,MemoryDescAddRole)3173 TEST_F(ValidationTestInvalidCompilation, MemoryDescAddRole) {
3174 if (!mInvalidCompilation) {
3175 return;
3176 }
3177 ANeuralNetworksMemoryDesc* desc = nullptr;
3178 ASSERT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
3179 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, mInvalidCompilation, 0, 1.0f),
3180 ANEURALNETWORKS_BAD_DATA);
3181 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, mInvalidCompilation, 0, 1.0f),
3182 ANEURALNETWORKS_BAD_DATA);
3183 ANeuralNetworksMemoryDesc_free(desc);
3184 }
3185
3186 // Also see TEST_F(ValidationTestCompilation, ExecutionTiming)
3187 // Also see TEST_F(ValidationTestCompilationForDevices_2, ExecutionTiming)
3188 // Also see TEST_F(ValidationTestCompilation, ExecutionUsability)
TEST_F(ValidationTestCompilationForDevices_1,ExecutionTiming)3189 TEST_F(ValidationTestCompilationForDevices_1, ExecutionTiming) {
3190 if (!mCompilation) {
3191 return;
3192 }
3193 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3194
3195 enum class ExecutionType : uint32_t { ASYNC, SYNC, BURST, FENCED };
3196 for (auto executionType :
3197 {ExecutionType::ASYNC, ExecutionType::SYNC, ExecutionType::BURST, ExecutionType::FENCED}) {
3198 SCOPED_TRACE(static_cast<uint32_t>(executionType));
3199
3200 ANeuralNetworksExecution* execution;
3201 ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution),
3202 ANEURALNETWORKS_NO_ERROR);
3203
3204 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(nullptr, false),
3205 ANEURALNETWORKS_UNEXPECTED_NULL);
3206 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(nullptr, true),
3207 ANEURALNETWORKS_UNEXPECTED_NULL);
3208 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, false),
3209 ANEURALNETWORKS_NO_ERROR);
3210 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, true),
3211 ANEURALNETWORKS_NO_ERROR);
3212
3213 float in0[] = {0.0f, 0.0f}, in1[] = {1.0f, 1.0f}, out0[2];
3214 int in2 = 0;
3215 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
3216 ANEURALNETWORKS_NO_ERROR);
3217 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, &in1, sizeof(in1)),
3218 ANEURALNETWORKS_NO_ERROR);
3219 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, &in2, sizeof(in2)),
3220 ANEURALNETWORKS_NO_ERROR);
3221 ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0, sizeof(out0)),
3222 ANEURALNETWORKS_NO_ERROR);
3223
3224 // Cannot getDuration until the execution has finished.
3225 uint64_t duration;
3226 EXPECT_EQ(ANeuralNetworksExecution_getDuration(
3227 execution, ANEURALNETWORKS_DURATION_ON_HARDWARE, &duration),
3228 ANEURALNETWORKS_BAD_STATE);
3229 EXPECT_EQ(ANeuralNetworksExecution_getDuration(
3230 execution, ANEURALNETWORKS_DURATION_IN_DRIVER, &duration),
3231 ANEURALNETWORKS_BAD_STATE);
3232
3233 auto testSetTimeoutTooLate = [execution] {
3234 // Cannot setTimeout if the execution has started.
3235 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(execution, kShortWaitInNanoseconds),
3236 ANEURALNETWORKS_BAD_STATE);
3237 };
3238
3239 auto testMeasureTooLate = [execution] {
3240 // Cannot setMeasureTiming if the execution has started.
3241 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, false),
3242 ANEURALNETWORKS_BAD_STATE);
3243 EXPECT_EQ(ANeuralNetworksExecution_setMeasureTiming(execution, true),
3244 ANEURALNETWORKS_BAD_STATE);
3245 };
3246
3247 // Compute.
3248 switch (executionType) {
3249 case ExecutionType::ASYNC: {
3250 ANeuralNetworksEvent* event;
3251 ASSERT_EQ(ANeuralNetworksExecution_startCompute(execution, &event),
3252 ANEURALNETWORKS_NO_ERROR);
3253 testMeasureTooLate();
3254 ASSERT_EQ(ANeuralNetworksEvent_wait(event), ANEURALNETWORKS_NO_ERROR);
3255 testSetTimeoutTooLate();
3256 testMeasureTooLate();
3257 ANeuralNetworksEvent_free(event);
3258 break;
3259 }
3260 case ExecutionType::SYNC: {
3261 ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_NO_ERROR);
3262 testSetTimeoutTooLate();
3263 testMeasureTooLate();
3264 break;
3265 }
3266 case ExecutionType::BURST: {
3267 ANeuralNetworksBurst* burst;
3268 ASSERT_EQ(ANeuralNetworksBurst_create(mCompilation, &burst),
3269 ANEURALNETWORKS_NO_ERROR);
3270 ASSERT_EQ(ANeuralNetworksExecution_burstCompute(execution, burst),
3271 ANEURALNETWORKS_NO_ERROR);
3272 testSetTimeoutTooLate();
3273 testMeasureTooLate();
3274 ANeuralNetworksBurst_free(burst);
3275 break;
3276 }
3277 case ExecutionType::FENCED: {
3278 ANeuralNetworksEvent* event = nullptr;
3279 ASSERT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution, nullptr,
3280 0, 0, &event),
3281 ANEURALNETWORKS_NO_ERROR);
3282 testMeasureTooLate();
3283 ASSERT_EQ(ANeuralNetworksEvent_wait(event), ANEURALNETWORKS_NO_ERROR);
3284 testSetTimeoutTooLate();
3285 testMeasureTooLate();
3286 ANeuralNetworksEvent_free(event);
3287 break;
3288 }
3289 default:
3290 FAIL() << "Unreachable";
3291 }
3292
3293 auto testDuration = [](ANeuralNetworksExecution* e, int32_t durationCode,
3294 bool nullDuration) {
3295 SCOPED_TRACE(e);
3296 SCOPED_TRACE(durationCode);
3297 SCOPED_TRACE(nullDuration);
3298
3299 // Strictly speaking, a duration COULD have this value, but it is
3300 // exceedingly unlikely. We'll use it as an initial value that we expect
3301 // to be modified by getDuration().
3302 const uint64_t kBogusDuration = UINT64_MAX - 1;
3303
3304 uint64_t duration = kBogusDuration;
3305 uint64_t* durationPtr = nullDuration ? nullptr : &duration;
3306
3307 int expectedResultCode = ANEURALNETWORKS_NO_ERROR;
3308 if (e == nullptr || durationPtr == nullptr) {
3309 expectedResultCode = ANEURALNETWORKS_UNEXPECTED_NULL;
3310 } else if (durationCode < 0 ||
3311 durationCode > ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER) {
3312 expectedResultCode = ANEURALNETWORKS_BAD_DATA;
3313 }
3314
3315 EXPECT_EQ(ANeuralNetworksExecution_getDuration(e, durationCode, durationPtr),
3316 expectedResultCode);
3317 if (expectedResultCode == ANEURALNETWORKS_NO_ERROR) {
3318 EXPECT_NE(duration, kBogusDuration);
3319 }
3320 };
3321
3322 std::vector<ANeuralNetworksExecution*> executions = {nullptr, execution};
3323 std::vector<int32_t> durationCodes = {-1,
3324 ANEURALNETWORKS_DURATION_ON_HARDWARE,
3325 ANEURALNETWORKS_DURATION_IN_DRIVER,
3326 ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE,
3327 ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER,
3328 ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER + 1};
3329 std::vector<bool> nullDurations = {false, true};
3330 for (auto e : executions) {
3331 for (auto d : durationCodes) {
3332 for (auto n : nullDurations) {
3333 testDuration(e, d, n);
3334 }
3335 }
3336 }
3337
3338 // free the execution
3339 ANeuralNetworksExecution_free(execution);
3340 }
3341 }
3342
3343 enum class TimeoutDurationType { SHORT, MAXIMUM };
createTimeoutDuration(TimeoutDurationType type)3344 uint64_t createTimeoutDuration(TimeoutDurationType type) {
3345 switch (type) {
3346 case TimeoutDurationType::SHORT:
3347 return kShortWaitInNanoseconds;
3348 case TimeoutDurationType::MAXIMUM:
3349 return std::numeric_limits<uint64_t>::max();
3350 }
3351 LOG(FATAL) << "Invalid TimeoutDurationType: " << static_cast<int>(type);
3352 return 0;
3353 }
3354
runExecutionSetTimeoutTest(ANeuralNetworksCompilation * compilation,TimeoutDurationType timeoutDurationType)3355 void runExecutionSetTimeoutTest(ANeuralNetworksCompilation* compilation,
3356 TimeoutDurationType timeoutDurationType) {
3357 if (!compilation) {
3358 return;
3359 }
3360 ASSERT_EQ(ANeuralNetworksCompilation_finish(compilation), ANEURALNETWORKS_NO_ERROR);
3361
3362 enum class ExecutionType : uint32_t { ASYNC, SYNC, BURST, FENCED };
3363 for (auto executionType :
3364 {ExecutionType::ASYNC, ExecutionType::SYNC, ExecutionType::BURST, ExecutionType::FENCED}) {
3365 SCOPED_TRACE(static_cast<uint32_t>(executionType));
3366
3367 ANeuralNetworksExecution* execution;
3368 ASSERT_EQ(ANeuralNetworksExecution_create(compilation, &execution),
3369 ANEURALNETWORKS_NO_ERROR);
3370 const auto scoped = android::base::make_scope_guard(
3371 [execution] { ANeuralNetworksExecution_free(execution); });
3372
3373 float in0[] = {0.0f, 0.0f}, in1[] = {1.0f, 1.0f}, out0[2];
3374 int in2 = 0;
3375 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
3376 ANEURALNETWORKS_NO_ERROR);
3377 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, &in1, sizeof(in1)),
3378 ANEURALNETWORKS_NO_ERROR);
3379 ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, &in2, sizeof(in2)),
3380 ANEURALNETWORKS_NO_ERROR);
3381 ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0, sizeof(out0)),
3382 ANEURALNETWORKS_NO_ERROR);
3383
3384 const uint64_t timeoutDuration = createTimeoutDuration(timeoutDurationType);
3385 EXPECT_EQ(ANeuralNetworksExecution_setTimeout(execution, timeoutDuration),
3386 ANEURALNETWORKS_NO_ERROR);
3387
3388 const auto checkResult = [timeoutDurationType](int n) {
3389 switch (timeoutDurationType) {
3390 case TimeoutDurationType::SHORT:
3391 EXPECT_TRUE(n == ANEURALNETWORKS_NO_ERROR ||
3392 n == ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT ||
3393 n == ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT);
3394 return;
3395 case TimeoutDurationType::MAXIMUM:
3396 EXPECT_EQ(n, ANEURALNETWORKS_NO_ERROR);
3397 return;
3398 }
3399 LOG(FATAL) << "Invalid TimeoutDurationType: " << static_cast<int>(timeoutDurationType);
3400 };
3401
3402 // Compute.
3403 switch (executionType) {
3404 case ExecutionType::ASYNC: {
3405 ANeuralNetworksEvent* event = nullptr;
3406 EXPECT_EQ(ANeuralNetworksExecution_startCompute(execution, &event),
3407 ANEURALNETWORKS_NO_ERROR);
3408 checkResult(ANeuralNetworksEvent_wait(event));
3409 ANeuralNetworksEvent_free(event);
3410 break;
3411 }
3412 case ExecutionType::SYNC: {
3413 checkResult(ANeuralNetworksExecution_compute(execution));
3414 break;
3415 }
3416 case ExecutionType::BURST: {
3417 ANeuralNetworksBurst* burst;
3418 ASSERT_EQ(ANeuralNetworksBurst_create(compilation, &burst),
3419 ANEURALNETWORKS_NO_ERROR);
3420 checkResult(ANeuralNetworksExecution_burstCompute(execution, burst));
3421 ANeuralNetworksBurst_free(burst);
3422 break;
3423 }
3424 case ExecutionType::FENCED: {
3425 ANeuralNetworksEvent* event = nullptr;
3426 EXPECT_EQ(ANeuralNetworksExecution_startComputeWithDependencies(execution, nullptr,
3427 0, 0, &event),
3428 ANEURALNETWORKS_NO_ERROR);
3429 checkResult(ANeuralNetworksEvent_wait(event));
3430 ANeuralNetworksEvent_free(event);
3431 break;
3432 }
3433 default:
3434 FAIL() << "Unreachable";
3435 }
3436 }
3437 }
3438
3439 // Also see TEST_F(ValidationTestCompilation, ExecutionSetTimeout)
3440 // Also see TEST_F(ValidationTestCompilationForDevices_2, ExecutionSetTimeout)
TEST_F(ValidationTestCompilationForDevices_1,ExecutionSetTimeout)3441 TEST_F(ValidationTestCompilationForDevices_1, ExecutionSetTimeout) {
3442 runExecutionSetTimeoutTest(mCompilation, TimeoutDurationType::SHORT);
3443 }
3444
TEST_F(ValidationTestCompilationForDevices_1,ExecutionSetTimeoutMaximum)3445 TEST_F(ValidationTestCompilationForDevices_1, ExecutionSetTimeoutMaximum) {
3446 runExecutionSetTimeoutTest(mCompilation, TimeoutDurationType::MAXIMUM);
3447 }
3448
TEST_F(ValidationTest,CreateMemoryDesc)3449 TEST_F(ValidationTest, CreateMemoryDesc) {
3450 EXPECT_EQ(ANeuralNetworksMemoryDesc_create(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
3451 }
3452
TEST_F(ValidationTestMemoryDesc,AddInputRole)3453 TEST_F(ValidationTestMemoryDesc, AddInputRole) {
3454 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(nullptr, mCompilation, 0, 1.0f),
3455 ANEURALNETWORKS_UNEXPECTED_NULL);
3456 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, nullptr, 0, 1.0f),
3457 ANEURALNETWORKS_UNEXPECTED_NULL);
3458
3459 // Unfinished compilation.
3460 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3461 ANEURALNETWORKS_BAD_DATA);
3462
3463 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3464
3465 // Index out of range.
3466 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 999, 1.0f),
3467 ANEURALNETWORKS_BAD_DATA);
3468
3469 // Invalid frequency.
3470 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 10.0f),
3471 ANEURALNETWORKS_BAD_DATA);
3472 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 0.0f),
3473 ANEURALNETWORKS_BAD_DATA);
3474 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, -1.0f),
3475 ANEURALNETWORKS_BAD_DATA);
3476
3477 // Specify the same operand twice.
3478 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3479 ANEURALNETWORKS_NO_ERROR);
3480 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3481 ANEURALNETWORKS_BAD_DATA);
3482
3483 // Attempting to modify a finished descriptor.
3484 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_NO_ERROR);
3485 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3486 ANEURALNETWORKS_BAD_STATE);
3487 }
3488
TEST_F(ValidationTestMemoryDesc,AddOutputRole)3489 TEST_F(ValidationTestMemoryDesc, AddOutputRole) {
3490 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(nullptr, mCompilation, 0, 1.0f),
3491 ANEURALNETWORKS_UNEXPECTED_NULL);
3492 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, nullptr, 0, 1.0f),
3493 ANEURALNETWORKS_UNEXPECTED_NULL);
3494
3495 // Unfinished compilation.
3496 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 1.0f),
3497 ANEURALNETWORKS_BAD_DATA);
3498
3499 ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3500
3501 // Index out of range.
3502 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 999, 1.0f),
3503 ANEURALNETWORKS_BAD_DATA);
3504
3505 // Invalid frequency.
3506 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 10.0f),
3507 ANEURALNETWORKS_BAD_DATA);
3508 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 0.0f),
3509 ANEURALNETWORKS_BAD_DATA);
3510 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, -1.0f),
3511 ANEURALNETWORKS_BAD_DATA);
3512
3513 // Specify the same operand twice.
3514 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 1.0f),
3515 ANEURALNETWORKS_NO_ERROR);
3516 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 1.0f),
3517 ANEURALNETWORKS_BAD_DATA);
3518
3519 // Attempting to modify a finished descriptor.
3520 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_NO_ERROR);
3521 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(mDesc, mCompilation, 0, 1.0f),
3522 ANEURALNETWORKS_BAD_STATE);
3523 }
3524
3525 // Creates and compiles a single-operation ADD model with the given operand type.
3526 // The caller is responsible for freeing the returned model and compilation.
3527 static std::pair<ANeuralNetworksModel*, ANeuralNetworksCompilation*>
createAndCompileAddModelWithType(const ANeuralNetworksOperandType & type)3528 createAndCompileAddModelWithType(const ANeuralNetworksOperandType& type) {
3529 // OperandType for activation scalar.
3530 const ANeuralNetworksOperandType actType = {
3531 .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};
3532
3533 ANeuralNetworksModel* model;
3534 EXPECT_EQ(ANeuralNetworksModel_create(&model), ANEURALNETWORKS_NO_ERROR);
3535 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &type), ANEURALNETWORKS_NO_ERROR);
3536 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &type), ANEURALNETWORKS_NO_ERROR);
3537 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &actType), ANEURALNETWORKS_NO_ERROR);
3538 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &type), ANEURALNETWORKS_NO_ERROR);
3539
3540 const uint32_t inList[] = {0, 1, 2};
3541 const uint32_t outList[] = {3};
3542 EXPECT_EQ(ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, inList, 1, outList),
3543 ANEURALNETWORKS_NO_ERROR);
3544 EXPECT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(model, 3, inList, 1, outList),
3545 ANEURALNETWORKS_NO_ERROR);
3546 EXPECT_EQ(ANeuralNetworksModel_finish(model), ANEURALNETWORKS_NO_ERROR);
3547
3548 ANeuralNetworksCompilation* compilation;
3549 EXPECT_EQ(ANeuralNetworksCompilation_create(model, &compilation), ANEURALNETWORKS_NO_ERROR);
3550 EXPECT_EQ(ANeuralNetworksCompilation_finish(compilation), ANEURALNETWORKS_NO_ERROR);
3551 return {model, compilation};
3552 }
3553
testIncompatibleOperands(const ANeuralNetworksCompilation * compilation,const ANeuralNetworksOperandType & badType)3554 static void testIncompatibleOperands(const ANeuralNetworksCompilation* compilation,
3555 const ANeuralNetworksOperandType& badType) {
3556 const auto [badModel, badCompilation] = createAndCompileAddModelWithType(badType);
3557 {
3558 ANeuralNetworksMemoryDesc* desc = nullptr;
3559 EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
3560 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, compilation, 0, 1.0f),
3561 ANEURALNETWORKS_NO_ERROR);
3562 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, badCompilation, 0, 1.0f),
3563 ANEURALNETWORKS_BAD_DATA);
3564 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, badCompilation, 0, 1.0f),
3565 ANEURALNETWORKS_BAD_DATA);
3566 ANeuralNetworksMemoryDesc_free(desc);
3567 }
3568 {
3569 ANeuralNetworksMemoryDesc* desc = nullptr;
3570 EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
3571 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, compilation, 0, 1.0f),
3572 ANEURALNETWORKS_NO_ERROR);
3573 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, badCompilation, 0, 1.0f),
3574 ANEURALNETWORKS_BAD_DATA);
3575 EXPECT_EQ(ANeuralNetworksMemoryDesc_addOutputRole(desc, badCompilation, 0, 1.0f),
3576 ANEURALNETWORKS_BAD_DATA);
3577 ANeuralNetworksMemoryDesc_free(desc);
3578 }
3579 ANeuralNetworksCompilation_free(badCompilation);
3580 ANeuralNetworksModel_free(badModel);
3581 }
3582
TEST_F(ValidationTestMemoryDesc,OperandMetadata)3583 TEST_F(ValidationTestMemoryDesc, OperandMetadata) {
3584 const uint32_t dimensions[] = {2};
3585 const uint32_t rank = std::size(dimensions);
3586 const ANeuralNetworksOperandType floatBase = {.type = ANEURALNETWORKS_TENSOR_FLOAT32,
3587 .dimensionCount = rank,
3588 .dimensions = dimensions,
3589 .scale = 0.0f,
3590 .zeroPoint = 0};
3591 const ANeuralNetworksOperandType quantBase = {.type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
3592 .dimensionCount = rank,
3593 .dimensions = dimensions,
3594 .scale = 1.0f,
3595 .zeroPoint = 0};
3596 const auto [floatModel, floatCompilation] = createAndCompileAddModelWithType(floatBase);
3597 const auto [quantModel, quantCompilation] = createAndCompileAddModelWithType(quantBase);
3598
3599 // Different data type.
3600 {
3601 SCOPED_TRACE("Data type");
3602 ANeuralNetworksOperandType wrongType = floatBase;
3603 wrongType.type = ANEURALNETWORKS_TENSOR_FLOAT16;
3604 testIncompatibleOperands(floatCompilation, wrongType);
3605 }
3606
3607 // Different scale.
3608 {
3609 SCOPED_TRACE("Scale");
3610 ANeuralNetworksOperandType wrongScale = quantBase;
3611 wrongScale.scale = 0.5f;
3612 testIncompatibleOperands(quantCompilation, wrongScale);
3613 }
3614
3615 // Different zero point.
3616 {
3617 SCOPED_TRACE("Zero point");
3618 ANeuralNetworksOperandType wrongZeroPoint = quantBase;
3619 wrongZeroPoint.zeroPoint = 128;
3620 testIncompatibleOperands(quantCompilation, wrongZeroPoint);
3621 }
3622
3623 // Different rank.
3624 {
3625 SCOPED_TRACE("Rank");
3626 const uint32_t badDimensions[] = {2, 1};
3627 const uint32_t badRank = std::size(badDimensions);
3628 ANeuralNetworksOperandType wrongRank = quantBase;
3629 wrongRank.dimensionCount = badRank;
3630 wrongRank.dimensions = badDimensions;
3631 testIncompatibleOperands(quantCompilation, wrongRank);
3632 }
3633
3634 // Different dimensions.
3635 {
3636 SCOPED_TRACE("Dimensions");
3637 const uint32_t badDimensions[] = {1};
3638 ANeuralNetworksOperandType wrongDims = quantBase;
3639 wrongDims.dimensions = badDimensions;
3640 testIncompatibleOperands(quantCompilation, wrongDims);
3641 }
3642
3643 ANeuralNetworksCompilation_free(floatCompilation);
3644 ANeuralNetworksCompilation_free(quantCompilation);
3645 ANeuralNetworksModel_free(floatModel);
3646 ANeuralNetworksModel_free(quantModel);
3647 }
3648
3649 // Creates and compiles a single-operation CONV_2D model whose filter uses the channel quant data
3650 // type with the given scales. The caller is responsible for freeing the returned model and compilation.
3651 static std::pair<ANeuralNetworksModel*, ANeuralNetworksCompilation*>
createAndCompileChannelQuantConvModel(const std::vector<float> & scales)3652 createAndCompileChannelQuantConvModel(const std::vector<float>& scales) {
3653 const uint32_t numChannels = scales.size();
3654
3655 // OperandType for input and output.
3656 const uint32_t inoutDimensions[] = {1, 16, 16, numChannels};
3657 const ANeuralNetworksOperandType inoutType = {
3658 .type = ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
3659 .dimensionCount = std::size(inoutDimensions),
3660 .dimensions = inoutDimensions,
3661 .scale = 1.0f,
3662 .zeroPoint = 0,
3663 };
3664
3665 // OperandType for filter.
3666 const uint32_t filterDimensions[] = {numChannels, 3, 3, numChannels};
3667 const ANeuralNetworksOperandType filterType = {
3668 .type = ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL,
3669 .dimensionCount = std::size(filterDimensions),
3670 .dimensions = filterDimensions,
3671 .scale = 0.0f,
3672 .zeroPoint = 0,
3673 };
3674
3675 // OperandType for bias.
3676 const uint32_t biasDimensions[] = {numChannels};
3677 const ANeuralNetworksOperandType biasType = {
3678 .type = ANEURALNETWORKS_TENSOR_INT32,
3679 .dimensionCount = std::size(biasDimensions),
3680 .dimensions = biasDimensions,
3681 .scale = 0.0f,
3682 .zeroPoint = 0,
3683 };
3684
3685 // OperandType for scalars: implicit padding code, strides, activation.
3686 const ANeuralNetworksOperandType scalarType = {
3687 .type = ANEURALNETWORKS_INT32, .dimensionCount = 0, .dimensions = nullptr};
3688
3689 ANeuralNetworksModel* model;
3690 EXPECT_EQ(ANeuralNetworksModel_create(&model), ANEURALNETWORKS_NO_ERROR);
3691 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &inoutType), ANEURALNETWORKS_NO_ERROR);
3692 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &filterType), ANEURALNETWORKS_NO_ERROR);
3693 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &biasType), ANEURALNETWORKS_NO_ERROR);
3694 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &scalarType), ANEURALNETWORKS_NO_ERROR);
3695 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &scalarType), ANEURALNETWORKS_NO_ERROR);
3696 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &scalarType), ANEURALNETWORKS_NO_ERROR);
3697 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &scalarType), ANEURALNETWORKS_NO_ERROR);
3698 EXPECT_EQ(ANeuralNetworksModel_addOperand(model, &inoutType), ANEURALNETWORKS_NO_ERROR);
3699
3700 // Set channel quant parameters for the filter tensor.
3701 const ANeuralNetworksSymmPerChannelQuantParams channelQuant = {
3702 .channelDim = 0,
3703 .scaleCount = numChannels,
3704 .scales = scales.data(),
3705 };
3706 EXPECT_EQ(ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(model, 1, &channelQuant),
3707 ANEURALNETWORKS_NO_ERROR);
3708
3709 const uint32_t inList[] = {0, 1, 2, 3, 4, 5, 6};
3710 const uint32_t outList[] = {7};
3711 EXPECT_EQ(ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_CONV_2D, std::size(inList),
3712 inList, std::size(outList), outList),
3713 ANEURALNETWORKS_NO_ERROR);
3714 EXPECT_EQ(ANeuralNetworksModel_identifyInputsAndOutputs(model, std::size(inList), inList,
3715 std::size(outList), outList),
3716 ANEURALNETWORKS_NO_ERROR);
3717 EXPECT_EQ(ANeuralNetworksModel_finish(model), ANEURALNETWORKS_NO_ERROR);
3718
3719 ANeuralNetworksCompilation* compilation;
3720 EXPECT_EQ(ANeuralNetworksCompilation_create(model, &compilation), ANEURALNETWORKS_NO_ERROR);
3721 EXPECT_EQ(ANeuralNetworksCompilation_finish(compilation), ANEURALNETWORKS_NO_ERROR);
3722 return {model, compilation};
3723 }
3724
TEST_F(ValidationTestMemoryDesc,ExtraParams)3725 TEST_F(ValidationTestMemoryDesc, ExtraParams) {
3726 // Create two compilations with conflicting channel quant scales.
3727 const auto [model1, compilation1] = createAndCompileChannelQuantConvModel({1.0f, 1.0f});
3728 const auto [model2, compilation2] = createAndCompileChannelQuantConvModel({0.5f, 0.5f});
3729
3730 ANeuralNetworksMemoryDesc* desc = nullptr;
3731 EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
3732 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, compilation1, 1, 1.0f),
3733 ANEURALNETWORKS_NO_ERROR);
3734 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, compilation2, 1, 1.0f),
3735 ANEURALNETWORKS_BAD_DATA);
3736 ANeuralNetworksMemoryDesc_free(desc);
3737
3738 ANeuralNetworksCompilation_free(compilation1);
3739 ANeuralNetworksCompilation_free(compilation2);
3740 ANeuralNetworksModel_free(model1);
3741 ANeuralNetworksModel_free(model2);
3742 }
3743
TEST_F(ValidationTestMemoryDesc,SetDimensions)3744 TEST_F(ValidationTestMemoryDesc, SetDimensions) {
3745 const uint32_t dimensions[] = {2};
3746 const uint32_t badDimensions[] = {3};
3747 const uint32_t rank = std::size(dimensions);
3748 const uint32_t badRankDimensions[] = {2, 1};
3749 const uint32_t badRank = std::size(badRankDimensions);
3750
3751 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(nullptr, rank, dimensions),
3752 ANEURALNETWORKS_UNEXPECTED_NULL);
3753 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, rank, nullptr),
3754 ANEURALNETWORKS_UNEXPECTED_NULL);
3755
3756 // Incompatible dimensions.
3757 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, rank, dimensions),
3758 ANEURALNETWORKS_NO_ERROR);
3759 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, rank, badDimensions),
3760 ANEURALNETWORKS_BAD_DATA);
3761 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, badRank, badRankDimensions),
3762 ANEURALNETWORKS_BAD_DATA);
3763
3764 // Attempting to modify a finished descriptor.
3765 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3766 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3767 ANEURALNETWORKS_NO_ERROR);
3768 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_NO_ERROR);
3769 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, rank, dimensions),
3770 ANEURALNETWORKS_BAD_STATE);
3771 }
3772
TEST_F(ValidationTestMemoryDesc,SetScalarDimensionsBeforeAddRole)3773 TEST_F(ValidationTestMemoryDesc, SetScalarDimensionsBeforeAddRole) {
3774 const uint32_t badDimensions[] = {2};
3775 const uint32_t badRank = std::size(badDimensions);
3776
3777 // Set non-zero rank.
3778 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, badRank, badDimensions),
3779 ANEURALNETWORKS_NO_ERROR);
3780
3781 // This should fail because input2 is a scalar.
3782 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3783 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 2, 1.0f),
3784 ANEURALNETWORKS_BAD_DATA);
3785 }
3786
TEST_F(ValidationTestMemoryDesc,SetScalarDimensionsAfterAddRole)3787 TEST_F(ValidationTestMemoryDesc, SetScalarDimensionsAfterAddRole) {
3788 const uint32_t badDimensions[] = {2};
3789 const uint32_t badRank = std::size(badDimensions);
3790
3791 // Input2 is a scalar.
3792 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3793 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 2, 1.0f),
3794 ANEURALNETWORKS_NO_ERROR);
3795
3796 // Setting rank 0 is allowed; setting a non-zero rank should fail because input2 is a scalar.
3797 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, 0, nullptr), ANEURALNETWORKS_NO_ERROR);
3798 EXPECT_EQ(ANeuralNetworksMemoryDesc_setDimensions(mDesc, badRank, badDimensions),
3799 ANEURALNETWORKS_BAD_DATA);
3800 }
3801
TEST_F(ValidationTestMemoryDesc,Finish)3802 TEST_F(ValidationTestMemoryDesc, Finish) {
3803 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(nullptr), ANEURALNETWORKS_UNEXPECTED_NULL);
3804
3805 // No usage is specified.
3806 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_BAD_DATA);
3807
3808 // Finish an already finished descriptor.
3809 EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
3810 EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(mDesc, mCompilation, 0, 1.0f),
3811 ANEURALNETWORKS_NO_ERROR);
3812 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_NO_ERROR);
3813 EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(mDesc), ANEURALNETWORKS_BAD_STATE);
3814 }
3815
TEST_F(ValidationTestMemoryDesc,CreateMemory)3816 TEST_F(ValidationTestMemoryDesc, CreateMemory) {
3817 ANeuralNetworksMemory* memory = nullptr;
3818 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(nullptr, &memory),
3819 ANEURALNETWORKS_UNEXPECTED_NULL);
3820 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(mDesc, nullptr),
3821 ANEURALNETWORKS_UNEXPECTED_NULL);
3822
3823 // Unfinished descriptor.
3824 EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(mDesc, &memory), ANEURALNETWORKS_BAD_STATE);
3825
3826 ANeuralNetworksMemory_free(memory);
3827 }
3828
TEST(ValidationTestMemory,CreateFromFd)3829 TEST(ValidationTestMemory, CreateFromFd) {
3830 const size_t memorySize = 20;
3831 #ifdef __ANDROID__
3832 int memoryFd = ASharedMemory_create("nnMemory", memorySize);
3833 #else // __ANDROID__
3834 TemporaryFile tmpFile;
3835 int memoryFd = tmpFile.release();
3836 CHECK_EQ(ftruncate(memoryFd, memorySize), 0);
3837 #endif // __ANDROID__
3838 ASSERT_GT(memoryFd, 0);
3839
3840 EXPECT_EQ(ANeuralNetworksMemory_createFromFd(memorySize, PROT_READ | PROT_WRITE, memoryFd, 0,
3841 nullptr),
3842 ANEURALNETWORKS_UNEXPECTED_NULL);
3843
3844 close(memoryFd);
3845 }

#ifdef __ANDROID__
TEST(ValidationTestMemory, CreateFromAHardwareBuffer) {
    const size_t memorySize = 20;
    AHardwareBuffer_Desc desc{
            .width = memorySize,
            .height = 1,
            .layers = 1,
            .format = AHARDWAREBUFFER_FORMAT_BLOB,
            .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
    };
    AHardwareBuffer* buffer = nullptr;
    ASSERT_EQ(AHardwareBuffer_allocate(&desc, &buffer), 0);
    EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(buffer, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    AHardwareBuffer_release(buffer);

    ANeuralNetworksMemory* memory = nullptr;
    EXPECT_EQ(ANeuralNetworksMemory_createFromAHardwareBuffer(nullptr, &memory),
              ANEURALNETWORKS_UNEXPECTED_NULL);
}
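// With both arguments valid (a live AHardwareBuffer and a non-null out-pointer),
// ANeuralNetworksMemory_createFromAHardwareBuffer is expected to return ANEURALNETWORKS_NO_ERROR.
// The test uses AHARDWAREBUFFER_FORMAT_BLOB, which NNAPI treats as a plain byte buffer, much like
// the shared-memory case above.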
#endif  // __ANDROID__

TEST_F(ValidationTestMemoryDesc, MemoryCopying) {
    uint32_t goodSize = sizeof(float) * 2, badSize1 = sizeof(float), badSize2 = sizeof(float) * 4;
    ANeuralNetworksMemory* goodAshmem = createAshmem(goodSize);
    ANeuralNetworksMemory* badAshmem1 = createAshmem(badSize1);
    ANeuralNetworksMemory* badAshmem2 = createAshmem(badSize2);

    const uint32_t goodDimensions[] = {1, 2};
    const uint32_t badDimensions1[] = {2};
    const uint32_t badDimensions2[] = {2, 1};
    const ANeuralNetworksOperandType goodType = {.type = ANEURALNETWORKS_TENSOR_FLOAT32,
                                                 .dimensionCount = std::size(goodDimensions),
                                                 .dimensions = goodDimensions,
                                                 .scale = 0.0f,
                                                 .zeroPoint = 0};
    const ANeuralNetworksOperandType badType1 = {.type = ANEURALNETWORKS_TENSOR_FLOAT32,
                                                 .dimensionCount = std::size(badDimensions1),
                                                 .dimensions = badDimensions1,
                                                 .scale = 0.0f,
                                                 .zeroPoint = 0};
    const ANeuralNetworksOperandType badType2 = {.type = ANEURALNETWORKS_TENSOR_FLOAT32,
                                                 .dimensionCount = std::size(badDimensions2),
                                                 .dimensions = badDimensions2,
                                                 .scale = 0.0f,
                                                 .zeroPoint = 0};
    const auto [goodModel, goodCompilation] = createAndCompileAddModelWithType(goodType);
    const auto [badModel1, badCompilation1] = createAndCompileAddModelWithType(badType1);
    const auto [badModel2, badCompilation2] = createAndCompileAddModelWithType(badType2);

    ANeuralNetworksMemoryDesc* desc = nullptr;
    ANeuralNetworksMemory *goodDeviceMemory1 = nullptr, *goodDeviceMemory2 = nullptr;
    EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, goodCompilation, 0, 1.0f),
              ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &goodDeviceMemory1),
              ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &goodDeviceMemory2),
              ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksMemoryDesc_free(desc);

    ANeuralNetworksMemory* badDeviceMemory1 = nullptr;
    EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, badCompilation1, 0, 1.0f),
              ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &badDeviceMemory1),
              ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksMemoryDesc_free(desc);

    ANeuralNetworksMemory* badDeviceMemory2 = nullptr;
    EXPECT_EQ(ANeuralNetworksMemoryDesc_create(&desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_addInputRole(desc, badCompilation2, 0, 1.0f),
              ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemoryDesc_finish(desc), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_createFromDesc(desc, &badDeviceMemory2),
              ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksMemoryDesc_free(desc);

    EXPECT_EQ(ANeuralNetworksMemory_copy(nullptr, goodDeviceMemory1),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory1, nullptr),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // Ashmem -> Ashmem
    // Bad memory size.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, badAshmem1), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, badAshmem2), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem1, goodAshmem), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem2, goodAshmem), ANEURALNETWORKS_BAD_DATA);

    // Ashmem -> Device Memory
    // Bad memory size.
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem1, goodDeviceMemory1), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem2, goodDeviceMemory1), ANEURALNETWORKS_BAD_DATA);

    // Device Memory -> Ashmem
    // Uninitialized source device memory.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory1, goodAshmem), ANEURALNETWORKS_BAD_DATA);
    // Bad memory size: initialize the device memory, then copy it to an ashmem that is too small.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, goodDeviceMemory1), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory1, badAshmem1), ANEURALNETWORKS_BAD_DATA);
    // Uninitialized source device memory: the failed copy on the next line deinitializes
    // goodDeviceMemory1, so the subsequent read from it is rejected.
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem1, goodDeviceMemory1), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory1, goodAshmem), ANEURALNETWORKS_BAD_DATA);
    // Bad memory size: re-initialize the device memory, then copy it to an ashmem that is too
    // large.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, goodDeviceMemory1), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory1, badAshmem2), ANEURALNETWORKS_BAD_DATA);

    // Device Memory -> Device Memory
    // Uninitialized source device memory.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory2, goodDeviceMemory1),
              ANEURALNETWORKS_BAD_DATA);
    // Incompatible rank.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, badDeviceMemory1), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badDeviceMemory1, goodDeviceMemory1),
              ANEURALNETWORKS_BAD_DATA);
    // Incompatible dimensions.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, badDeviceMemory2), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badDeviceMemory2, goodDeviceMemory1),
              ANEURALNETWORKS_BAD_DATA);
    // Deinitialized source device memory: a failed copy resets the destination to the
    // uninitialized state.
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodAshmem, goodDeviceMemory2), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksMemory_copy(badAshmem1, goodDeviceMemory2), ANEURALNETWORKS_BAD_DATA);
    EXPECT_EQ(ANeuralNetworksMemory_copy(goodDeviceMemory2, goodDeviceMemory1),
              ANEURALNETWORKS_BAD_DATA);

    ANeuralNetworksMemory_free(goodDeviceMemory1);
    ANeuralNetworksMemory_free(goodDeviceMemory2);
    ANeuralNetworksMemory_free(badDeviceMemory1);
    ANeuralNetworksMemory_free(badDeviceMemory2);
    ANeuralNetworksCompilation_free(goodCompilation);
    ANeuralNetworksCompilation_free(badCompilation1);
    ANeuralNetworksCompilation_free(badCompilation2);
    ANeuralNetworksModel_free(goodModel);
    ANeuralNetworksModel_free(badModel1);
    ANeuralNetworksModel_free(badModel2);
}
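// In summary, ANeuralNetworksMemory_copy as exercised above requires matching byte sizes for
// ashmem-backed memories, compatible rank and dimensions for device memories created from a
// descriptor, and an initialized source; a failed copy leaves the destination device memory
// uninitialized until it is successfully written again.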

#ifndef NNTEST_ONLY_PUBLIC_API
TEST(ValidationTestDevice, GetExtensionSupport) {
    bool result;
    EXPECT_EQ(ANeuralNetworksDevice_getExtensionSupport(nullptr, kTestExtensionName, &result),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    uint32_t numDevices = 0;
    EXPECT_EQ(ANeuralNetworks_getDeviceCount(&numDevices), ANEURALNETWORKS_NO_ERROR);

    for (uint32_t i = 0; i < numDevices; i++) {
        SCOPED_TRACE(i);
        ANeuralNetworksDevice* device;
        EXPECT_EQ(ANeuralNetworks_getDevice(i, &device), ANEURALNETWORKS_NO_ERROR);
        EXPECT_EQ(ANeuralNetworksDevice_getExtensionSupport(device, kTestExtensionName, nullptr),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksDevice_getExtensionSupport(device, nullptr, &result),
                  ANEURALNETWORKS_UNEXPECTED_NULL);
        EXPECT_EQ(ANeuralNetworksDevice_getExtensionSupport(device, kTestExtensionName, &result),
                  ANEURALNETWORKS_NO_ERROR);
    }
}
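// Note that the final call above succeeds regardless of whether the device actually implements
// kTestExtensionName: the query itself is valid, and support (or the lack of it) is reported
// through the boolean out-parameter rather than the return code.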

constexpr const char* kTestAttributeExtensionName = "com.android.test_attribute_extension";
const uint16_t kAttributeCode = 0;
const uint16_t kAttributeCode2 = 2;
const uint8_t kAttributeValue = 0;

class ValidationTestCompilationExtension : public ValidationTestCompilation {
   protected:
    virtual void SetUp() {
        ValidationTestCompilation::SetUp();
        EXPECT_TRUE(::android::nn::TypeManager::get()->forTest_registerExtension({
                .name = kTestAttributeExtensionName,
                .operandTypes = {},
        }));
    }

    virtual void TearDown() {
        ::android::nn::TypeManager::get()->forTest_reset();
        ValidationTestCompilation::TearDown();
    }
};

// Also see TEST_F(ValidationTestCompilationExtensionForDevices_1, AddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_2, AddExtensionAttribute)
TEST_F(ValidationTestCompilationExtension, AddExtensionAttribute) {
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(nullptr, kTestAttributeExtensionName,
                                                               kAttributeCode, &kAttributeValue,
                                                               sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, nullptr, kAttributeCode, &kAttributeValue, sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, nullptr,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    // ExtensionAttribute can only be added to Compilations created from CompilationForDevices with
    // one device specified.
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);
}
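// To add vendor extension attributes successfully, the compilation must target exactly one device,
// e.g. (sketch; "model" and "device" are placeholders for a finished model and a selected device):
//   ANeuralNetworksCompilation* compilation = nullptr;
//   ANeuralNetworksCompilation_createForDevices(model, &device, /*numDevices=*/1, &compilation);
//   ANeuralNetworksCompilation_addExtensionAttribute(compilation, kTestAttributeExtensionName,
//                                                    kAttributeCode, &kAttributeValue,
//                                                    sizeof(uint8_t));
// The ...ForDevices_1 variants of these tests below exercise exactly that path.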

// Also see TEST_F(ValidationTestCompilationExtensionForDevices_1, ExecutionAddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_2, ExecutionAddExtensionAttribute)
TEST_F(ValidationTestCompilationExtension, ExecutionAddExtensionAttribute) {
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(nullptr, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);

    ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksExecution* execution;
    ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, nullptr, kAttributeCode,
                                                             &kAttributeValue, sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, nullptr,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_UNEXPECTED_NULL);
    // ExtensionAttribute can only be added to Executions created from CompilationForDevices with
    // one device specified.
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);
    ANeuralNetworksExecution_free(execution);
}
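// As with compilations, execution extension attributes are only accepted when the underlying
// compilation was created for exactly one device; see the ...ForDevices_1 variant below for the
// successful path and the ...ForDevices_2 variant for the multi-device rejection.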

class ValidationTestCompilationExtensionForDevices_1
    : public ValidationTestCompilationForDevices_1 {
   protected:
    virtual void SetUp() {
        ValidationTestCompilationForDevices_1::SetUp();
        EXPECT_TRUE(::android::nn::TypeManager::get()->forTest_registerExtension({
                .name = kTestAttributeExtensionName,
                .operandTypes = {},
        }));
    }

    virtual void TearDown() {
        ::android::nn::TypeManager::get()->forTest_reset();
        ValidationTestCompilationForDevices_1::TearDown();
    }
};

// Also see TEST_F(ValidationTestCompilationExtension, AddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_2, AddExtensionAttribute)
TEST_F(ValidationTestCompilationExtensionForDevices_1, AddExtensionAttribute) {
    if (!mCompilation) {
        return;
    }
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_NO_ERROR);
    // Adding another attribute.
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode2, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_NO_ERROR);
    // Adding the same attribute twice is illegal.
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);
    // Attempt to finish the compilation.
    EXPECT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);

    // Attributes cannot be added to an already finished compilation.
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_STATE);
}
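// The compilation-side rules exercised above: each (extension, attribute code) pair may be added
// at most once, and all attributes must be added before ANeuralNetworksCompilation_finish();
// afterwards the call fails with ANEURALNETWORKS_BAD_STATE.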

// Also see TEST_F(ValidationTestCompilationExtension, ExecutionAddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_2, ExecutionAddExtensionAttribute)
TEST_F(ValidationTestCompilationExtensionForDevices_1, ExecutionAddExtensionAttribute) {
    if (!mCompilation) {
        return;
    }
    ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksExecution* execution;
    ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);

    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_NO_ERROR);
    // Adding another attribute.
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode2, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_NO_ERROR);
    // Adding the same attribute twice is illegal.
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);

    // Start the execution.
    float in0[] = {0.0f, 0.0f}, in1[] = {1.0f, 1.0f}, out0[2];
    int in2 = 0;
    ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 0, nullptr, &in0, sizeof(in0)),
              ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 1, nullptr, &in1, sizeof(in1)),
              ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksExecution_setInput(execution, 2, nullptr, &in2, sizeof(in2)),
              ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksExecution_setOutput(execution, 0, nullptr, &out0, sizeof(out0)),
              ANEURALNETWORKS_NO_ERROR);
    ASSERT_EQ(ANeuralNetworksExecution_compute(execution), ANEURALNETWORKS_NO_ERROR);

    // Attributes cannot be added once the execution has started.
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_STATE);

    ANeuralNetworksExecution_free(execution);
}
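// The execution-side rules mirror the compilation-side ones: attributes must be unique and must
// all be added before the execution is started (here, before ANeuralNetworksExecution_compute);
// once the execution has begun, further additions fail with ANEURALNETWORKS_BAD_STATE.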

class ValidationTestCompilationExtensionForDevices_2
    : public ValidationTestCompilationForDevices_2 {
   protected:
    virtual void SetUp() {
        ValidationTestCompilationForDevices_2::SetUp();
        EXPECT_TRUE(::android::nn::TypeManager::get()->forTest_registerExtension({
                .name = kTestAttributeExtensionName,
                .operandTypes = {},
        }));
    }

    virtual void TearDown() {
        ::android::nn::TypeManager::get()->forTest_reset();
        ValidationTestCompilationForDevices_2::TearDown();
    }
};

// Also see TEST_F(ValidationTestCompilationExtension, AddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_1, AddExtensionAttribute)
TEST_F(ValidationTestCompilationExtensionForDevices_2, AddExtensionAttribute) {
    if (!mCompilation) {
        return;
    }
    // ExtensionAttribute can only be added to Compilations created from CompilationForDevices with
    // one device specified.
    EXPECT_EQ(ANeuralNetworksCompilation_addExtensionAttribute(
                      mCompilation, kTestAttributeExtensionName, kAttributeCode, &kAttributeValue,
                      sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);
}

// Also see TEST_F(ValidationTestCompilationExtension, ExecutionAddExtensionAttribute)
// Also see TEST_F(ValidationTestCompilationExtensionForDevices_1, ExecutionAddExtensionAttribute)
TEST_F(ValidationTestCompilationExtensionForDevices_2, ExecutionAddExtensionAttribute) {
    if (!mCompilation) {
        return;
    }
    ASSERT_EQ(ANeuralNetworksCompilation_finish(mCompilation), ANEURALNETWORKS_NO_ERROR);
    ANeuralNetworksExecution* execution;
    ASSERT_EQ(ANeuralNetworksExecution_create(mCompilation, &execution), ANEURALNETWORKS_NO_ERROR);

    // ExtensionAttribute can only be added to Executions created from CompilationForDevices with
    // one device specified.
    EXPECT_EQ(ANeuralNetworksExecution_addExtensionAttribute(execution, kTestAttributeExtensionName,
                                                             kAttributeCode, &kAttributeValue,
                                                             sizeof(uint8_t)),
              ANEURALNETWORKS_BAD_DATA);
    ANeuralNetworksExecution_free(execution);
}
#endif  // NNTEST_ONLY_PUBLIC_API

}  // namespace