1 // Copyright 2020 Google LLC
2 //
3 // This source code is licensed under the BSD-style license found in the
4 // LICENSE file in the root directory of this source tree.
5
6 #include <xnnpack.h>
7 #include <xnnpack/memory-planner.h>
8 #include <xnnpack/subgraph.h>
9
10 #include <gtest/gtest.h>
11
// Verifies that the allocation tracker derives the correct live range
// ([first_node, last_node]) for every value from the node input/output lists.
TEST(MemoryPlanner, ValueLiveInfo) {
  EXPECT_EQ(xnn_status_success, xnn_initialize(nullptr /* allocator */));
  // Create a simple subgraph with 2 nodes and 4 tensors as illustrated below:
  //   T0 ----> N0 ----> T2 and T2 ----> N1 ----> T3
  //   T1 ----/              T1 ----/
  // Value-initialize so fields this test does not set explicitly are zeroed
  // instead of left indeterminate (reading them would be UB).
  struct xnn_subgraph subgraph = {};
  subgraph.num_values = 4;
  subgraph.num_nodes = 2;
  struct xnn_node nodes[2] = {};
  nodes[0].num_inputs = 2;
  nodes[0].inputs[0] = 0;
  nodes[0].inputs[1] = 1;
  nodes[0].num_outputs = 1;
  nodes[0].outputs[0] = 2;

  nodes[1].num_inputs = 2;
  nodes[1].inputs[0] = 1;
  nodes[1].inputs[1] = 2;
  nodes[1].num_outputs = 1;
  nodes[1].outputs[0] = 3;
  subgraph.nodes = nodes;

  struct xnn_value_allocation_tracker tracker;
  xnn_init_value_allocation_tracker(&tracker, &subgraph);

  // T0 is only touched by N0.
  EXPECT_EQ(0, tracker.usage[0].first_node);
  EXPECT_EQ(0, tracker.usage[0].last_node);

  // T1 feeds both N0 and N1.
  EXPECT_EQ(0, tracker.usage[1].first_node);
  EXPECT_EQ(1, tracker.usage[1].last_node);

  // T2 is produced by N0 and consumed by N1.
  EXPECT_EQ(0, tracker.usage[2].first_node);
  EXPECT_EQ(1, tracker.usage[2].last_node);

  // T3 is only produced by N1.
  EXPECT_EQ(1, tracker.usage[3].first_node);
  EXPECT_EQ(1, tracker.usage[3].last_node);

  xnn_release_value_allocation_tracker(&tracker);
}
51
// Verifies that the planner coalesces freed memory blocks: values whose live
// ranges do not overlap should share arena space, shrinking the total size.
TEST(MemoryPlanner, MemoryBlocksCoalescing) {
  EXPECT_EQ(xnn_status_success, xnn_initialize(nullptr /* allocator */));
  // Value-initialize so fields not set below are zeroed, not indeterminate.
  struct xnn_subgraph subgraph = {};
  subgraph.num_nodes = 0;
  subgraph.num_values = 5;
  struct xnn_value_allocation_tracker tracker;
  xnn_init_value_allocation_tracker(&tracker, &subgraph);
  // As this is an empty subgraph, we create the following xnn_value_usage stub.
  tracker.usage[0].first_node = 1;
  tracker.usage[0].last_node = 1;
  xnn_add_value_allocation_tracker(&tracker, 0, 56);

  tracker.usage[1].first_node = 0;
  tracker.usage[1].last_node = 1;
  xnn_add_value_allocation_tracker(&tracker, 1, 40);

  tracker.usage[2].first_node = 1;
  tracker.usage[2].last_node = 1;
  xnn_add_value_allocation_tracker(&tracker, 2, 64);

  tracker.usage[3].first_node = 0;
  tracker.usage[3].last_node = 0;
  xnn_add_value_allocation_tracker(&tracker, 3, 152);

  tracker.usage[4].first_node = 1;
  tracker.usage[4].last_node = 1;
  xnn_add_value_allocation_tracker(&tracker, 4, 20);

  xnn_plan_value_allocation_tracker(&tracker);

#if XNN_ENABLE_MEMOPT
  // With memory optimization, non-overlapping values reuse coalesced blocks.
  EXPECT_EQ(192, tracker.mem_arena_size);
  EXPECT_EQ(64, tracker.usage[0].alloc_offset);
  EXPECT_EQ(152, tracker.usage[1].alloc_offset);
  EXPECT_EQ(0, tracker.usage[2].alloc_offset);
  EXPECT_EQ(0, tracker.usage[3].alloc_offset);
  EXPECT_EQ(120, tracker.usage[4].alloc_offset);
#else
  // Without memory optimization, every value gets its own slice of the arena.
  EXPECT_EQ(332, tracker.mem_arena_size);
  EXPECT_EQ(0, tracker.usage[0].alloc_offset);
  EXPECT_EQ(57, tracker.usage[1].alloc_offset);
  EXPECT_EQ(96, tracker.usage[2].alloc_offset);
  EXPECT_EQ(160, tracker.usage[3].alloc_offset);
  EXPECT_EQ(312, tracker.usage[4].alloc_offset);
#endif

  xnn_release_value_allocation_tracker(&tracker);
}
100
// Exercises the planner on a larger set of values with staggered, overlapping
// live ranges and checks the resulting arena size and per-value offsets.
TEST(MemoryPlanner, GeneralPlanning) {
  EXPECT_EQ(xnn_status_success, xnn_initialize(nullptr /* allocator */));
  // Value-initialize so fields not set below are zeroed, not indeterminate.
  struct xnn_subgraph subgraph = {};
  subgraph.num_nodes = 0;
  subgraph.num_values = 8;
  struct xnn_value_allocation_tracker tracker;
  xnn_init_value_allocation_tracker(&tracker, &subgraph);
  // As this is an empty subgraph, we create the following xnn_value_usage stub.
  tracker.usage[0].first_node = 0;
  tracker.usage[0].last_node = 1;
  xnn_add_value_allocation_tracker(&tracker, 0, 32);

  tracker.usage[1].first_node = 1;
  tracker.usage[1].last_node = 4;
  xnn_add_value_allocation_tracker(&tracker, 1, 28);

  tracker.usage[2].first_node = 2;
  tracker.usage[2].last_node = 5;
  xnn_add_value_allocation_tracker(&tracker, 2, 36);

  tracker.usage[3].first_node = 3;
  tracker.usage[3].last_node = 5;
  xnn_add_value_allocation_tracker(&tracker, 3, 16);

  tracker.usage[4].first_node = 4;
  tracker.usage[4].last_node = 5;
  xnn_add_value_allocation_tracker(&tracker, 4, 8);

  tracker.usage[5].first_node = 5;
  tracker.usage[5].last_node = 7;
  xnn_add_value_allocation_tracker(&tracker, 5, 64);

  tracker.usage[6].first_node = 6;
  tracker.usage[6].last_node = 8;
  xnn_add_value_allocation_tracker(&tracker, 6, 10);

  tracker.usage[7].first_node = 7;
  tracker.usage[7].last_node = 8;
  xnn_add_value_allocation_tracker(&tracker, 7, 40);

  xnn_plan_value_allocation_tracker(&tracker);

#if XNN_ENABLE_MEMOPT
  // With memory optimization, values with disjoint live ranges share offsets
  // (e.g. value 5 reuses offset 0 after value 0 dies).
  EXPECT_EQ(124, tracker.mem_arena_size);
  EXPECT_EQ(0, tracker.usage[0].alloc_offset);
  EXPECT_EQ(32, tracker.usage[1].alloc_offset);
  EXPECT_EQ(64, tracker.usage[2].alloc_offset);
  EXPECT_EQ(100, tracker.usage[3].alloc_offset);
  EXPECT_EQ(116, tracker.usage[4].alloc_offset);
  EXPECT_EQ(0, tracker.usage[5].alloc_offset);
  EXPECT_EQ(104, tracker.usage[6].alloc_offset);
  EXPECT_EQ(64, tracker.usage[7].alloc_offset);
#else
  // Without memory optimization, every value gets its own slice of the arena.
  EXPECT_EQ(234, tracker.mem_arena_size);
  EXPECT_EQ(0, tracker.usage[0].alloc_offset);
  EXPECT_EQ(32, tracker.usage[1].alloc_offset);
  EXPECT_EQ(60, tracker.usage[2].alloc_offset);
  EXPECT_EQ(96, tracker.usage[3].alloc_offset);
  EXPECT_EQ(112, tracker.usage[4].alloc_offset);
  EXPECT_EQ(120, tracker.usage[5].alloc_offset);
  EXPECT_EQ(184, tracker.usage[6].alloc_offset);
  EXPECT_EQ(194, tracker.usage[7].alloc_offset);
#endif

  xnn_release_value_allocation_tracker(&tracker);
}
167