# Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
# SPDX-License-Identifier: MIT
import os

import pytest
import pyarmnn as ann
import numpy as np


@pytest.fixture()
def parser(shared_data_folder):
    """
    Parse and set up the test network used by the tests below.

    `shared_data_folder` is expected to point at the directory that contains
    mock_model.armnn.
    """
    parser = ann.IDeserializer()
    parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn'))

    yield parser


def test_deserializer_swig_destroy():
    assert ann.IDeserializer.__swig_destroy__, "SWIG Python destructor should be defined"
    assert ann.IDeserializer.__swig_destroy__.__name__ == "delete_IDeserializer"


def test_check_deserializer_swig_ownership(parser):
    # Check that SWIG owns the parser proxy. With ownership, the underlying
    # C++ object is deleted automatically when the Python object is
    # garbage-collected.
    assert parser.thisown
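    # Illustrative aside (not exercised here): `thisown` is SWIG's standard
    # ownership flag. Assigning 0 to it would hand the wrapped C++ object back
    # to native code, so Python would no longer call delete_IDeserializer on
    # garbage collection, e.g.:
    #
    #     detached = ann.IDeserializer()
    #     detached.thisown = 0  # lifetime is now managed outside Python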


def test_deserializer_get_network_input_binding_info(parser):
    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    input_name = 'input_1'

    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)

    # The binding info is a (binding id, TensorInfo) tuple; check the TensorInfo
    tensor = input_binding_info[1]
    assert tensor.GetDataType() == 2
    assert tensor.GetNumDimensions() == 4
    assert tensor.GetNumElements() == 784
    assert tensor.GetQuantizationOffset() == 128
    assert tensor.GetQuantizationScale() == 0.007843137718737125
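
    # Sanity check on the quantization parameters (ArmNN's affine scheme:
    # real = scale * (quantized - offset)): scale ~ 1/127.5 with offset 128
    # maps the uint8 range [0, 255] onto roughly [-1.0, 1.0].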


def test_deserializer_get_network_output_binding_info(parser):
    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    output_name = "dense/Softmax"

    output_binding_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name)

    # Check the tensor info retrieved from GetNetworkOutputBindingInfo
    tensor = output_binding_info[1]

    assert tensor.GetDataType() == 2
    assert tensor.GetNumDimensions() == 2
    assert tensor.GetNumElements() == 10
    assert tensor.GetQuantizationOffset() == 0
    assert tensor.GetQuantizationScale() == 0.00390625
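
    # scale = 1/256 with offset 0 maps the uint8 outputs onto [0, 255/256],
    # which suits the softmax probabilities this layer produces.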


def test_deserializer_filenotfound_exception(shared_data_folder):
    parser = ann.IDeserializer()

    with pytest.raises(RuntimeError) as err:
        parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'some_unknown_network.armnn'))

    # Only check part of the exception message, since it contains an absolute
    # path that differs between machines.
    assert 'Cannot read the file' in str(err.value)


def test_deserializer_end_to_end(shared_data_folder):
    parser = ann.IDeserializer()

    network = parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, "mock_model.armnn"))

    # use 0 as a dummy value for layer_id, which is unused in the actual implementation
    layer_id = 0
    input_name = 'input_1'
    output_name = 'dense/Softmax'

    input_binding_info = parser.GetNetworkInputBindingInfo(layer_id, input_name)

    preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]

    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
    assert 0 == len(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert "" == messages

    # Load test image data stored in input_lite.npy
    input_tensor_data = np.load(os.path.join(shared_data_folder, 'deserializer/input_lite.npy'))
    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])

    # Build the (binding id, Tensor) pair for the output by hand
    output_tensors = []
    out_bind_info = parser.GetNetworkOutputBindingInfo(layer_id, output_name)
    out_tensor_info = out_bind_info[1]
    out_tensor_id = out_bind_info[0]
    output_tensors.append((out_tensor_id,
                           ann.Tensor(out_tensor_info)))

    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    output_vectors = [out_tensor[1].get_memory_area() for out_tensor in output_tensors]

    # Load golden output file for result comparison.
    expected_outputs = np.load(os.path.join(shared_data_folder, 'deserializer/golden_output_lite.npy'))

    # Check that output matches golden output
    assert (expected_outputs == output_vectors[0]).all()
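

# Sketch of an alternative to the hand-rolled output tensor handling above. It
# assumes the pyarmnn convenience helpers `ann.make_output_tensors` and
# `ann.workload_tensors_to_ndarray` are available in this build; if they are
# not, keep building the (binding id, Tensor) pairs manually as in
# test_deserializer_end_to_end.
def test_deserializer_output_tensor_helpers(shared_data_folder):
    parser = ann.IDeserializer()
    parser.CreateNetworkFromBinary(os.path.join(shared_data_folder, 'mock_model.armnn'))

    # layer_id of 0 is a dummy value, as in the tests above
    out_bind_info = parser.GetNetworkOutputBindingInfo(0, 'dense/Softmax')

    # Let the helper build the (binding id, Tensor) pairs
    output_tensors = ann.make_output_tensors([out_bind_info])

    # Convert the workload tensors to ndarrays shaped by their TensorInfo;
    # no workload has run, so only the shape/size is meaningful here.
    output_data = ann.workload_tensors_to_ndarray(output_tensors)
    assert output_data[0].size == 10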