<!-- HTML header for doxygen 1.8.9.1-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.13"/>
<meta name="robots" content="NOINDEX, NOFOLLOW" /> <!-- Prevent indexing by search engines -->
<title>Compute Library: Validation and benchmarks tests</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtreedata.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
  $(document).ready(initResizable);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/x-mathjax-config">
  MathJax.Hub.Config({
    extensions: ["tex2jax.js"],
    jax: ["input/TeX","output/HTML-CSS"],
});
</script><script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr style="height: 56px;">
  <td style="padding-left: 0.5em;">
   <div id="projectname">Compute Library
   &#160;<span id="projectnumber">18.11</span>
   </div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.13 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
  initMenu('',true,false,'search.php','Search');
  $(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
  <div id="nav-tree">
    <div id="nav-tree-contents">
      <div id="nav-sync" class="sync"></div>
    </div>
  </div>
  <div id="splitbar" style="-moz-user-select:none;"
       class="ui-resizable-handle">
  </div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('tests.xhtml','');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
     onmouseover="return searchBox.OnSearchSelectShow()"
     onmouseout="return searchBox.OnSearchSelectHide()"
     onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>

<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
        name="MSearchResults" id="MSearchResults">
</iframe>
</div>

<div class="header">
  <div class="headertitle">
<div class="title">Validation and benchmarks tests </div> </div>
</div><!--header-->
<div class="contents">
<div class="toc"><h3>Table of Contents</h3>
<ul><li class="level1"><a href="#tests_overview">Overview</a><ul><li class="level2"><a href="#tests_overview_structure">Directory structure</a></li>
<li class="level2"><a href="#tests_overview_fixtures">Fixtures</a><ul><li class="level3"><a href="#tests_overview_fixtures_fixture">Fixture</a></li>
<li class="level3"><a href="#tests_overview_fixtures_data_fixture">Data fixture</a></li>
</ul>
</li>
<li class="level2"><a href="#tests_overview_test_cases">Test cases</a><ul><li class="level3"><a href="#tests_overview_test_cases_test_case">Test case</a></li>
<li class="level3"><a href="#tests_overview_test_cases_fixture_fixture_test_case">Fixture test case</a></li>
<li class="level3"><a href="#tests_overview_test_cases_fixture_register_fixture_test_case">Registering a fixture as test case</a></li>
<li class="level3"><a href="#tests_overview_test_cases_data_test_case">Data test case</a></li>
<li class="level3"><a href="#tests_overview_test_cases_fixture_data_test_case">Fixture data test case</a></li>
<li class="level3"><a href="#tests_overview_test_cases_register_fixture_data_test_case">Registering a fixture as data test case</a></li>
</ul>
</li>
</ul>
</li>
<li class="level1"><a href="#writing_tests">Writing validation tests</a></li>
<li class="level1"><a href="#tests_running_tests">Running tests</a><ul><li class="level2"><a href="#tests_running_tests_benchmark_and_validation">Benchmarking and validation suites</a><ul><li class="level3"><a href="#tests_running_tests_benchmarking_filter">Filter tests</a></li>
<li class="level3"><a href="#tests_running_tests_benchmarking_runtime">Runtime</a></li>
<li class="level3"><a href="#tests_running_tests_benchmarking_output">Output</a></li>
<li class="level3"><a href="#tests_running_tests_benchmarking_mode">Mode</a></li>
<li class="level3"><a href="#tests_running_tests_benchmarking_instruments">Instruments</a></li>
<li class="level3"><a href="#tests_running_examples">Examples</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="textblock"><h1><a class="anchor" id="tests_overview"></a>
Overview</h1>
<p>Benchmark and validation tests are based on the same framework to set up and run the tests. In addition to running simple, self-contained test functions, the framework supports fixtures and data test cases. The former allow common setup routines to be shared between the various backends, thus reducing the amount of duplicated code. The latter can be used to parameterize tests or fixtures with different inputs, e.g. different tensor shapes. One limitation is that tests/fixtures cannot be parameterized based on the data type if static type information is needed within the test (e.g. to validate the results).</p>
<dl class="section note"><dt>Note</dt><dd>By default, tests are not built. To enable them you need to add <code>validation_tests=1</code> and/or <code>benchmark_tests=1</code> to your SCons line.</dd>
<dd>
Tests are not included in the pre-built binary archive; you have to build them from source.</dd></dl>
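<p>For illustration, a possible SCons invocation that enables both test suites could look like the following sketch; the target options shown (<code>os</code>, <code>arch</code>, <code>neon</code>, <code>opencl</code>) are placeholders and depend on your platform and build configuration. </p><pre class="fragment">scons os=linux arch=arm64-v8a neon=1 opencl=1 validation_tests=1 benchmark_tests=1 -j4
</pre>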
<h2><a class="anchor" id="tests_overview_structure"></a>
Directory structure</h2>
<pre class="fragment">.
`-- tests &lt;- Top level test directory. All files in here are shared among validation and benchmark.
    |-- framework &lt;- Underlying test framework.
    |-- CL \
    |-- NEON -&gt; Backend specific files with helper functions etc.
    |-- benchmark &lt;- Top level directory for the benchmarking files.
    |   |-- fixtures &lt;- Fixtures for benchmark tests.
    |   |-- CL &lt;- OpenCL backend test cases on a function level.
    |   |   `-- SYSTEM &lt;- OpenCL system tests, e.g. whole networks
    |   `-- NEON &lt;- Same for NEON
    |       `-- SYSTEM
    |-- datasets &lt;- Datasets for benchmark and validation tests.
    |-- main.cpp &lt;- Main entry point for the tests. Currently shared between validation and benchmarking.
    |-- networks &lt;- Network classes for system level tests.
    `-- validation -&gt; Top level directory for validation files.
        |-- CPP -&gt; C++ reference code
        |-- CL \
        |-- NEON -&gt; Backend specific test cases
        `-- fixtures -&gt; Fixtures shared among all backends. Used to setup target function and tensors.
</pre><h2><a class="anchor" id="tests_overview_fixtures"></a>
Fixtures</h2>
<p>Fixtures can be used to share common setup, teardown or even run tasks among multiple test cases. For that purpose a fixture can define <code>setup</code>, <code>teardown</code> and <code>run</code> methods. Additionally, the constructor and destructor can be customized.</p>
<p>An instance of the fixture is created immediately before the actual test is executed. After construction the <a class="el" href="classarm__compute_1_1test_1_1framework_1_1_fixture.xhtml#a4fc01d736fe50cf5b977f755b675f11d">framework::Fixture::setup</a> method is called. Then the test function or the fixture's <code>run</code> method is invoked. After test execution the <a class="el" href="classarm__compute_1_1test_1_1framework_1_1_fixture.xhtml#a4adab6322a0276f34a7d656d49fc865c">framework::Fixture::teardown</a> method is called and finally the fixture is destroyed.</p>
<h3><a class="anchor" id="tests_overview_fixtures_fixture"></a>
Fixture</h3>
<p>Fixtures for non-parameterized tests are straightforward. The custom fixture class has to inherit from <a class="el" href="classarm__compute_1_1test_1_1framework_1_1_fixture.xhtml">framework::Fixture</a> and can implement any of the <code>setup</code>, <code>teardown</code> or <code>run</code> methods. None of these methods takes any arguments or returns anything. </p><pre class="fragment">class CustomFixture : public framework::Fixture
{
    void setup()
    {
        _ptr = malloc(4000);
    }

    void run()
    {
        ARM_COMPUTE_ASSERT(_ptr != nullptr);
    }

    void teardown()
    {
        free(_ptr);
    }

    void *_ptr;
};
</pre><h3><a class="anchor" id="tests_overview_fixtures_data_fixture"></a>
Data fixture</h3>
<p>The advantage of a parameterized fixture is that arguments can be passed to the setup method at runtime. To make this possible, the setup method has to be a template with a type parameter for every argument (though the template parameters don't have to be used). All other methods remain the same. </p><pre class="fragment">class CustomFixture : public framework::Fixture
{
#ifdef ALTERNATIVE_DECLARATION
    template &lt;typename ...&gt;
    void setup(size_t size)
    {
        _ptr = malloc(size);
    }
#else
    template &lt;typename T&gt;
    void setup(T size)
    {
        _ptr = malloc(size);
    }
#endif

    void run()
    {
        ARM_COMPUTE_ASSERT(_ptr != nullptr);
    }

    void teardown()
    {
        free(_ptr);
    }

    void *_ptr;
};
</pre><h2><a class="anchor" id="tests_overview_test_cases"></a>
Test cases</h2>
<p>All of the following macros can optionally be prefixed with <code>EXPECTED_FAILURE_</code> or <code>DISABLED_</code>.</p>
<h3><a class="anchor" id="tests_overview_test_cases_test_case"></a>
Test case</h3>
<p>A simple test case function taking no inputs and having no (shared) state.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the dataset mode in which the test will be active.</li>
</ul>
<pre class="fragment">TEST_CASE(TestCaseName, DatasetMode::PRECOMMIT)
{
    ARM_COMPUTE_ASSERT_EQUAL(1 + 1, 2);
}
</pre><h3><a class="anchor" id="tests_overview_test_cases_fixture_fixture_test_case"></a>
Fixture test case</h3>
<p>A simple test case function taking no inputs that inherits from a fixture. The test case will have access to all public and protected members of the fixture. Only the setup and teardown methods of the fixture will be used. The body of this function will be used as the test function.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the class name of the fixture.</li>
<li>Third argument is the dataset mode in which the test will be active.</li>
</ul>
<pre class="fragment">class FixtureName : public framework::Fixture
{
    public:
        void setup() override
        {
            _one = 1;
        }

    protected:
        int _one;
};

FIXTURE_TEST_CASE(TestCaseName, FixtureName, DatasetMode::PRECOMMIT)
{
    ARM_COMPUTE_ASSERT_EQUAL(_one + 1, 2);
}
</pre><h3><a class="anchor" id="tests_overview_test_cases_fixture_register_fixture_test_case"></a>
Registering a fixture as test case</h3>
<p>Allows a fixture to be used directly as a test case. Instead of defining a new test function, the run method of the fixture will be executed.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the class name of the fixture.</li>
<li>Third argument is the dataset mode in which the test will be active.</li>
</ul>
<pre class="fragment">class FixtureName : public framework::Fixture
{
    public:
        void setup() override
        {
            _one = 1;
        }

        void run() override
        {
            ARM_COMPUTE_ASSERT_EQUAL(_one + 1, 2);
        }

    protected:
        int _one;
};

REGISTER_FIXTURE_TEST_CASE(TestCaseName, FixtureName, DatasetMode::PRECOMMIT);
</pre><h3><a class="anchor" id="tests_overview_test_cases_data_test_case"></a>
Data test case</h3>
<p>A parameterized test case function that has no (shared) state. The dataset will be used to generate versions of the test case with different inputs.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the dataset mode in which the test will be active.</li>
<li>Third argument is the dataset.</li>
<li>Further arguments specify the names of the arguments to the test function. Their number must match the arity of the dataset.</li>
</ul>
<pre class="fragment">DATA_TEST_CASE(TestCaseName, DatasetMode::PRECOMMIT, framework::make("Numbers", {1, 2, 3}), num)
{
    ARM_COMPUTE_ASSERT(num &lt; 4);
}
</pre><h3><a class="anchor" id="tests_overview_test_cases_fixture_data_test_case"></a>
Fixture data test case</h3>
<p>A parameterized test case that inherits from a fixture. The test case will have access to all public and protected members of the fixture. Only the setup and teardown methods of the fixture will be used. The setup method of the fixture needs to be a template and has to accept inputs from the dataset as arguments. The body of this function will be used as the test function. The dataset will be used to generate versions of the test case with different inputs.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the class name of the fixture.</li>
<li>Third argument is the dataset mode in which the test will be active.</li>
<li>Fourth argument is the dataset.</li>
</ul>
<pre class="fragment">class FixtureName : public framework::Fixture
{
    public:
        template &lt;typename T&gt;
        void setup(T num)
        {
            _num = num;
        }

    protected:
        int _num;
};

FIXTURE_DATA_TEST_CASE(TestCaseName, FixtureName, DatasetMode::PRECOMMIT, framework::make("Numbers", {1, 2, 3}))
{
    ARM_COMPUTE_ASSERT(_num &lt; 4);
}
</pre><h3><a class="anchor" id="tests_overview_test_cases_register_fixture_data_test_case"></a>
Registering a fixture as data test case</h3>
<p>Allows a fixture to be used directly as a parameterized test case. Instead of defining a new test function, the run method of the fixture will be executed. The setup method of the fixture needs to be a template and has to accept inputs from the dataset as arguments. The dataset will be used to generate versions of the test case with different inputs.</p>
<ul>
<li>First argument is the name of the test case (has to be unique within the enclosing test suite).</li>
<li>Second argument is the class name of the fixture.</li>
<li>Third argument is the dataset mode in which the test will be active.</li>
<li>Fourth argument is the dataset.</li>
</ul>
<pre class="fragment">class FixtureName : public framework::Fixture
{
    public:
        template &lt;typename T&gt;
        void setup(T num)
        {
            _num = num;
        }

        void run() override
        {
            ARM_COMPUTE_ASSERT(_num &lt; 4);
        }

    protected:
        int _num;
};

REGISTER_FIXTURE_DATA_TEST_CASE(TestCaseName, FixtureName, DatasetMode::PRECOMMIT, framework::make("Numbers", {1, 2, 3}));
</pre><h1><a class="anchor" id="writing_tests"></a>
Writing validation tests</h1>
<p>Before starting a new test case, have a look at the existing ones. They should provide a good overview of how test cases are structured.</p>
<ul>
<li>The C++ reference needs to be added to <code>tests/validation/CPP/</code>. The reference function is typically a template parameterized by the underlying value type of the <code><a class="el" href="classarm__compute_1_1test_1_1_simple_tensor.xhtml" title="Simple tensor object that stores elements in a consecutive chunk of memory. ">SimpleTensor</a></code>. This makes it easy to specialise for different data types. A minimal sketch is shown after this list.</li>
<li>If all backends have a common interface, it makes sense to share the setup code. This can be done by adding a fixture in <code>tests/validation/fixtures/</code>. Inside the <code>setup</code> method of a fixture the tensors can be created and initialised, and the function can be configured and run. The actual test then only has to validate the results. To be shared among multiple backends the fixture class is usually a template that accepts the specific types (data, tensor class, function class etc.) as parameters.</li>
<li>The actual test cases need to be added for each backend individually. Typically there will be multiple tests for different data types and for different execution modes, e.g. precommit and nightly.</li>
</ul>
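<p>As an illustration only, a C++ reference function in <code>tests/validation/CPP/</code> might follow the sketch below. The operation shown (a hypothetical element-wise pass-through named <code>custom_op</code>) is a placeholder and not part of the library. </p><pre class="fragment">template &lt;typename T&gt;
SimpleTensor&lt;T&gt; custom_op(const SimpleTensor&lt;T&gt; &amp;src)
{
    // Output tensor with the same shape and data type as the input.
    SimpleTensor&lt;T&gt; dst{ src.shape(), src.data_type() };

    // Hypothetical element-wise operation: copy each element unchanged.
    for(int i = 0; i &lt; src.num_elements(); ++i)
    {
        dst[i] = src[i];
    }

    return dst;
}
</pre><p>A fixture would then run the target function on each backend, and the test would compare the target output against this reference with an appropriate tolerance.</p>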
<h1><a class="anchor" id="tests_running_tests"></a>
Running tests</h1>
<h2><a class="anchor" id="tests_running_tests_benchmark_and_validation"></a>
Benchmarking and validation suites</h2>
<h3><a class="anchor" id="tests_running_tests_benchmarking_filter"></a>
Filter tests</h3>
<p>All tests can be run by invoking </p><pre class="fragment">./arm_compute_benchmark ./data
</pre><p>where <code>./data</code> contains the assets needed by the tests.</p>
<p>If only a subset of the tests has to be executed, the <code>--filter</code> option takes a regular expression to select matching tests. </p><pre class="fragment">./arm_compute_benchmark --filter='^NEON/.*AlexNet' ./data
</pre><dl class="section note"><dt>Note</dt><dd>Filtering will be much faster if the regular expression is anchored to the start ("^") or end ("$") of the line.</dd></dl>
<p>Additionally, each test has a test id which can be used as a filter, too. However, the test id is not guaranteed to be stable when new tests are added; a test only keeps its id within a specific build. </p><pre class="fragment">./arm_compute_benchmark --filter-id=10 ./data
</pre><p>All available tests can be displayed with the <code>--list-tests</code> switch. </p><pre class="fragment">./arm_compute_benchmark --list-tests
</pre><p>More options can be found in the <code>--help</code> message.</p>
357<h3><a class="anchor" id="tests_running_tests_benchmarking_runtime"></a>
Anthony Barbierdbdab852017-06-23 15:42:00 +0100358Runtime</h3>
Kaizen8938bd32017-09-28 14:38:23 +0100359<p>By default every test is run once on a single thread. The number of iterations can be controlled via the <code>--iterations</code> option and the number of threads via <code>--threads</code>.</p>
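<p>For example, the following invocation (the values are illustrative) runs each benchmark for ten iterations using four threads: </p><pre class="fragment">./arm_compute_benchmark --iterations=10 --threads=4 ./data
</pre>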
<h3><a class="anchor" id="tests_running_tests_benchmarking_output"></a>
Output</h3>
<p>By default the benchmarking results are printed in a human-readable format on the command line. The colored output can be disabled via <code>--no-color-output</code>. As an alternative output format, JSON is supported and can be selected via <code>--log-format=json</code>. To write the output to a file instead of stdout, the <code>--log-file</code> option can be used.</p>
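<p>For instance, to capture the results as JSON in a file (the file name is only an example): </p><pre class="fragment">./arm_compute_benchmark --log-format=json --log-file=benchmark_results.json ./data
</pre>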
<h3><a class="anchor" id="tests_running_tests_benchmarking_mode"></a>
Mode</h3>
<p>Tests contain different datasets of different sizes, some of which will take several hours to run. You can select which datasets to use with the <code>--mode</code> option; we recommend you start with <code>--mode=precommit</code>.</p>
<h3><a class="anchor" id="tests_running_tests_benchmarking_instruments"></a>
Instruments</h3>
<p>You can use the <code>--instruments</code> option to select one or more instruments to measure the execution time of the benchmark tests.</p>
<p><code>PMU</code> will try to read the CPU PMU events from the kernel (they need to be enabled on your platform).</p>
<p><code>MALI</code> will try to collect Mali hardware performance counters (you need a recent enough Mali driver).</p>
<p><code>WALL_CLOCK_TIMER</code> will measure time using <code>gettimeofday</code>: this should work on all platforms.</p>
<p>You can pass a combination of these instruments: <code>--instruments=PMU,MALI,WALL_CLOCK_TIMER</code></p>
<dl class="section note"><dt>Note</dt><dd>You need to make sure the instruments have been selected at compile time using the <code>pmu=1</code> or <code>mali=1</code> scons options.</dd></dl>
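<p>A sketch of an SCons line that enables both compile-time instruments together with the benchmark tests (any other options depend on your target): </p><pre class="fragment">scons benchmark_tests=1 pmu=1 mali=1
</pre>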
<h3><a class="anchor" id="tests_running_examples"></a>
Examples</h3>
<p>To run all the precommit validation tests: </p><pre class="fragment">LD_LIBRARY_PATH=. ./arm_compute_validation --mode=precommit
</pre><p>To run the OpenCL precommit validation tests: </p><pre class="fragment">LD_LIBRARY_PATH=. ./arm_compute_validation --mode=precommit --filter="^CL.*"
</pre><p>To run the NEON precommit benchmark tests with the PMU and wall clock timer (in milliseconds) instruments enabled: </p><pre class="fragment">LD_LIBRARY_PATH=. ./arm_compute_benchmark --mode=precommit --filter="^NEON.*" --instruments="pmu,wall_clock_timer_ms" --iterations=10
</pre><p>To run the OpenCL precommit benchmark tests with the OpenCL kernel timer in milliseconds enabled: </p><pre class="fragment">LD_LIBRARY_PATH=. ./arm_compute_benchmark --mode=precommit --filter="^CL.*" --instruments="opencl_timer_ms" --iterations=10</pre> </div></div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
  <ul>
    <li class="footer">Generated on Thu Nov 22 2018 11:57:44 for Compute Library by
    <a href="http://www.doxygen.org/index.html">
    <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.13 </li>
  </ul>
</div>
</body>
</html>