xref: /aosp_15_r20/external/ComputeLibrary/tests/framework/Framework.h (revision c217d954acce2dbc11938adb493fc0abd69584f3)
/*
 * Copyright (c) 2017-2021 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
24 #ifndef ARM_COMPUTE_TEST_FRAMEWORK
25 #define ARM_COMPUTE_TEST_FRAMEWORK
26 
27 #include "DatasetModes.h"
28 #include "Exceptions.h"
29 #include "Profiler.h"
30 #include "TestCase.h"
31 #include "TestCaseFactory.h"
32 #include "TestResult.h"
33 #include "Utils.h"
34 #include "instruments/Instruments.h"
35 #include "printers/Printer.h"
36 
37 #include <algorithm>
38 #include <chrono>
39 #include <map>
40 #include <memory>
41 #include <numeric>
42 #include <ostream>
43 #include <set>
44 #include <sstream>
45 #include <string>
46 #include <vector>
47 
48 namespace arm_compute
49 {
50 namespace test
51 {
52 namespace framework
53 {
54 class TestFilter;
55 
/** Framework configuration structure */
struct FrameworkConfig
{
    std::vector<framework::InstrumentsDescription> instruments{};               /**< Instrument types that will be used for benchmarking. */
    std::string                                    name_filter{};               /**< Regular expression to filter tests by name. Only matching tests will be executed. */
    std::string                                    id_filter{};                 /**< String to match selected test ids. Only matching tests will be executed. */
    DatasetMode                                    mode{ DatasetMode::ALL };    /**< Dataset mode. */
    int                                            num_iterations{ 1 };         /**< Number of iterations per test. */
    float                                          cooldown_sec{ -1.f };        /**< Delay between tests in seconds (default -1; presumably a negative value disables the delay — confirm in Framework implementation). */
    LogLevel                                       log_level{ LogLevel::NONE }; /**< Verbosity of the output. */
    bool                                           configure_only{ false };     /**< Only configure kernels */
};
68 
/** Information about a test case.
 *
 * A test can be identified either via its id or via its name. Additionally
 * each test is tagged with the data set mode in which it will be used and
 * its status.
 *
 * @note The mapping between test id and test name is not guaranteed to be
 * stable. It is subject to change as new tests are added.
 */
struct TestInfo
{
    int                     id;     /**< Test ID */
    std::string             name;   /**< Test name */
    DatasetMode             mode;   /**< Test data set mode */
    TestCaseFactory::Status status; /**< Test status */
};
85 
86 inline bool operator<(const TestInfo &lhs, const TestInfo &rhs)
87 {
88     return lhs.id < rhs.id;
89 }
90 
/** Main framework class.
 *
 * Keeps track of the global state, owns all test cases and collects results.
 */
class Framework final
{
public:
    /** Access to the singleton.
     *
     * @return Unique instance of the framework class.
     */
    static Framework &get();

    /** Supported instrument types for benchmarking.
     *
     * @return Set of all available instrument types.
     */
    std::set<InstrumentsDescription> available_instruments() const;

    /** Init the framework.
     *
     * @see TestFilter::TestFilter for the format of the string to filter ids.
     *
     * @param[in] config Framework configuration meta-data.
     */
    void init(const FrameworkConfig &config);

    /** Add a new test suite.
     *
     * @warning Cannot be used at execution time. It can only be used for
     * registering test cases.
     *
     * @param[in] name Name of the added test suite.
     */
    void push_suite(std::string name);

    /** Remove innermost test suite.
     *
     * @warning Cannot be used at execution time. It can only be used for
     * registering test cases.
     */
    void pop_suite();

    /** Add a test case to the framework.
     *
     * @param[in] test_name Name of the new test case.
     * @param[in] mode      Mode in which to include the test.
     * @param[in] status    Status of the test case.
     */
    template <typename T>
    void add_test_case(std::string test_name, DatasetMode mode, TestCaseFactory::Status status);

    /** Add a data test case to the framework.
     *
     * @param[in] test_name   Name of the new test case.
     * @param[in] mode        Mode in which to include the test.
     * @param[in] status      Status of the test case.
     * @param[in] description Description of @p data.
     * @param[in] data        Data that will be used as input to the test.
     */
    template <typename T, typename D>
    void add_data_test_case(std::string test_name, DatasetMode mode, TestCaseFactory::Status status, std::string description, D &&data);

    /** Add info string for the next expectation/assertion.
     *
     * @param[in] info Info string.
     */
    void add_test_info(std::string info);

    /** Clear the collected test info. */
    void clear_test_info();

    /** Check if any info has been registered.
     *
     * @return True if there is test info.
     */
    bool has_test_info() const;

    /** Print test info.
     *
     * @param[out] os Output stream.
     */
    void print_test_info(std::ostream &os) const;

    /** Tell the framework that execution of a test starts.
     *
     * @param[in] info Test info.
     */
    void log_test_start(const TestInfo &info);

    /** Tell the framework that a test case is skipped.
     *
     * @param[in] info Test info.
     */
    void log_test_skipped(const TestInfo &info);

    /** Tell the framework that a test case finished.
     *
     * @param[in] info Test info.
     */
    void log_test_end(const TestInfo &info);

    /** Tell the framework that the currently running test case failed a non-fatal expectation.
     *
     * @param[in] error Description of the error.
     */
    void log_failed_expectation(const TestError &error);

    /** Print the debug information that has already been logged
     *
     * @param[in] info Description of the log info.
     */
    void log_info(const std::string &info);

    /** Number of iterations per test case.
     *
     * @return Number of iterations per test case.
     */
    int num_iterations() const;

    /** Set number of iterations per test case.
     *
     * @param[in] num_iterations Number of iterations per test case.
     */
    void set_num_iterations(int num_iterations);

    /** Should errors be caught or thrown by the framework.
     *
     * @return True if errors are thrown.
     */
    bool throw_errors() const;

    /** Set whether errors are caught or thrown by the framework.
     *
     * @param[in] throw_errors True if errors should be thrown.
     */
    void set_throw_errors(bool throw_errors);

    /** Indicates if test execution is stopped after the first failed test.
     *
     * @return True if the execution is going to be stopped after the first failed test.
     */
    bool stop_on_error() const;

    /** Set whether to stop execution after the first failed test.
     *
     * @param[in] stop_on_error True if execution is going to be stopped after first failed test.
     */
    void set_stop_on_error(bool stop_on_error);

    /** Indicates if a test should be marked as failed when its assets are missing.
     *
     * @return True if a test should be marked as failed when its assets are missing.
     */
    bool error_on_missing_assets() const;

    /** Set whether a test should be considered as failed if its assets cannot be found.
     *
     * @param[in] error_on_missing_assets True if a test should be marked as failed when its assets are missing.
     */
    void set_error_on_missing_assets(bool error_on_missing_assets);

    /** Run all enabled test cases.
     *
     * @return True if all test cases executed successful.
     */
    bool run();

    /** Set the result for an executed test case.
     *
     * @param[in] info   Test info.
     * @param[in] result Execution result.
     */
    void set_test_result(TestInfo info, TestResult result);

    /** Use the specified printer to output test results from the last run.
     *
     * This method can be used if the test results need to be obtained using a
     * different printer than the one managed by the framework.
     *
     * @param[in] printer Printer used to output results.
     */
    void print_test_results(Printer &printer) const;

    /** Factory method to obtain a configured profiler.
     *
     * The profiler enables all instruments that have been passed to the @ref
     * init method.
     *
     * @return Configured profiler to collect benchmark results.
     */
    Profiler get_profiler() const;

    /** Set the printer used for the output of test results.
     *
     * @param[in] printer Pointer to a printer.
     */
    void add_printer(Printer *printer);

    /** List of @ref TestInfo's.
     *
     * @return Vector with all test ids.
     */
    std::vector<TestInfo> test_infos() const;

    /** Get the current logging level
     *
     * @return The current logging level.
     */
    LogLevel log_level() const;
    /** Sets instruments info
     *
     * @note TODO(COMPMID-2638) : Remove once instruments are transferred outside the framework.
     *
     * @param[in] instr_info Instruments info to set
     */
    void set_instruments_info(InstrumentsInfo instr_info);
    /** Get the configure only flag
     *
     * @return The current configure only flag.
     */
    bool configure_only() const;
    /** Return whether the new fixture has been called
     *
     * @return The current new fixture call flag.
     */
    bool new_fixture_call() const;
    /** Set the new fixture call flag
     *
     * @param[in] val Value to set for the flag
     */
    void set_new_fixture_call(bool val);

private:
    Framework();
    ~Framework() = default;

    // Singleton: non-copyable.
    Framework(const Framework &) = delete;
    Framework &operator=(const Framework &) = delete;

    /** Execute the test case described by @p info using @p test_factory. */
    void run_test(const TestInfo &info, TestCaseFactory &test_factory);
    /** Tally the collected test results per result status. */
    std::map<TestResult::Status, int> count_test_results() const;

    /** Returns the current test suite name.
     *
     * @warning Cannot be used at execution time to get the test suite of the
     * currently executed test case. It can only be used for registering test
     * cases.
     *
     * @return Name of the current test suite.
     */
    std::string current_suite_name() const;

    /* Perform func on all printers */
    template <typename F>
    void func_on_all_printers(F &&func);

    std::vector<std::string>                      _test_suite_name{}; /**< Stack of currently open test suite names. */
    std::vector<std::unique_ptr<TestCaseFactory>> _test_factories{};  /**< Factories for all registered test cases. */
    std::map<TestInfo, TestResult> _test_results{};                   /**< Results collected for executed tests. */
    int                    _num_iterations{ 1 };                      /**< Number of iterations per test case. */
    float                  _cooldown_sec{ -1.f };                     /**< Delay between tests in seconds. */
    bool                   _throw_errors{ false };                    /**< If true, errors are thrown instead of caught. */
    bool                   _stop_on_error{ false };                   /**< If true, execution stops after the first failed test. */
    bool                   _error_on_missing_assets{ false };         /**< If true, tests with missing assets are marked as failed. */
    std::vector<Printer *> _printers{};                               /**< Registered printers (raw, non-owning pointers; default dtor does not delete them). */
    bool                   _configure_only{ false };                  /**< Only configure kernels. */
    bool                   _new_fixture_call{ false };                /**< Whether the new fixture has been called. */

    // Signature of an instrument factory function.
    using create_function = std::unique_ptr<Instrument>();
    std::map<InstrumentsDescription, create_function *> _available_instruments{}; /**< Factory functions for all available instruments. */

    std::set<framework::InstrumentsDescription> _instruments{ std::pair<InstrumentType, ScaleFactor>(InstrumentType::NONE, ScaleFactor::NONE) }; /**< Instruments selected via init(). */
    std::unique_ptr<TestFilter>                 _test_filter;                    /**< Filter deciding which tests run. */
    LogLevel                                    _log_level{ LogLevel::ALL };     /**< Verbosity of the output. */
    const TestInfo                             *_current_test_info{ nullptr };   /**< Info of the currently running test, if any. */
    TestResult                                 *_current_test_result{ nullptr }; /**< Result of the currently running test, if any. */
    std::vector<std::string>                    _test_info{};                    /**< Info strings registered for the next expectation/assertion. */
};
372 
373 template <typename T>
add_test_case(std::string test_name,DatasetMode mode,TestCaseFactory::Status status)374 inline void Framework::add_test_case(std::string test_name, DatasetMode mode, TestCaseFactory::Status status)
375 {
376     _test_factories.emplace_back(std::make_unique<SimpleTestCaseFactory<T>>(current_suite_name(), std::move(test_name), mode, status));
377 }
378 
379 template <typename T, typename D>
add_data_test_case(std::string test_name,DatasetMode mode,TestCaseFactory::Status status,std::string description,D && data)380 inline void Framework::add_data_test_case(std::string test_name, DatasetMode mode, TestCaseFactory::Status status, std::string description, D &&data)
381 {
382     // WORKAROUND for GCC 4.9
383     // The function should get *it which is tuple but that seems to trigger a
384     // bug in the compiler.
385     auto tmp = std::unique_ptr<DataTestCaseFactory<T, decltype(*std::declval<D>())>>(new DataTestCaseFactory<T, decltype(*std::declval<D>())>(current_suite_name(), std::move(test_name), mode, status,
386                                                                                      std::move(description), *data));
387     _test_factories.emplace_back(std::move(tmp));
388 }
389 } // namespace framework
390 } // namespace test
391 } // namespace arm_compute
392 #endif /* ARM_COMPUTE_TEST_FRAMEWORK */
393