Yunlian Jiang | 4578ec1 | 2013-05-22 14:54:39 -0700 | [diff] [blame] | 1 | #!/usr/bin/python |
| 2 | |
| 3 | # Copyright (c) 2011 The Chromium OS Authors. All rights reserved. |
| 4 | # Use of this source code is governed by a BSD-style license that can be |
| 5 | # found in the LICENSE file. |
| 6 | |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 7 | import hashlib |
| 8 | import mock |
| 9 | import mock_instance |
| 10 | import os |
| 11 | import tempfile |
Yunlian Jiang | 4578ec1 | 2013-05-22 14:54:39 -0700 | [diff] [blame] | 12 | import unittest |
| 13 | |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 14 | import image_checksummer |
| 15 | import machine_manager |
Yunlian Jiang | 4578ec1 | 2013-05-22 14:54:39 -0700 | [diff] [blame] | 16 | |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 17 | from label import MockLabel |
| 18 | from results_cache import CacheConditions |
Yunlian Jiang | 4578ec1 | 2013-05-22 14:54:39 -0700 | [diff] [blame] | 19 | from results_cache import Result |
| 20 | from results_cache import ResultsCache |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 21 | from results_cache import TelemetryResult |
Yunlian Jiang | 0d1a9f3 | 2015-12-09 10:47:11 -0800 | [diff] [blame] | 22 | from cros_utils import command_executer |
| 23 | from cros_utils import logger |
| 24 | from cros_utils import misc |
Yunlian Jiang | 4578ec1 | 2013-05-22 14:54:39 -0700 | [diff] [blame] | 25 | |
# Canned stdout from a real `test_that.sh ... LibCBench` run on a lumpy
# device.  The Result-parsing tests below extract keyvals and the results
# directory ('Results placed in <dir>') from this text.
OUTPUT = """CMD (True): ./test_that.sh --remote=172.17.128.241 --board=lumpy LibCBench
CMD (None): cd /usr/local/google/home/yunlian/gd/src/build/images/lumpy/latest/../../../../..; cros_sdk -- ./in_chroot_cmd6X7Cxu.sh
Identity added: /tmp/test_that.PO1234567/autotest_key (/tmp/test_that.PO1234567/autotest_key)
INFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest.

INFO : Running the following control files 1 times:
INFO : * 'client/site_tests/platform_LibCBench/control'

INFO : Running client test client/site_tests/platform_LibCBench/control
./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args
ERROR:root:import statsd failed, no stats will be reported.
14:20:22 INFO | Results placed in /tmp/test_that.PO1234567/platform_LibCBench
14:20:22 INFO | Processing control file
14:20:23 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:23 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.
14:20:23 INFO | INFO ---- ---- kernel=3.8.11 localtime=May 22 14:20:23 timestamp=1369257623
14:20:23 INFO | Installing autotest on 172.17.128.241
14:20:23 INFO | Using installation dir /usr/local/autotest
14:20:23 WARNI| No job_repo_url for <remote host: 172.17.128.241>
14:20:23 INFO | Could not install autotest using the packaging system: No repos to install an autotest client from. Trying other methods
14:20:23 INFO | Installation of autotest completed
14:20:24 WARNI| No job_repo_url for <remote host: 172.17.128.241>
14:20:24 INFO | Executing /usr/local/autotest/bin/autotest /usr/local/autotest/control phase 0
14:20:24 INFO | Entered autotestd_monitor.
14:20:24 INFO | Finished launching tail subprocesses.
14:20:24 INFO | Finished waiting on autotestd to start.
14:20:26 INFO | START ---- ---- timestamp=1369257625 localtime=May 22 14:20:25
14:20:26 INFO | START platform_LibCBench platform_LibCBench timestamp=1369257625 localtime=May 22 14:20:25
14:20:30 INFO | GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30 completed successfully
14:20:30 INFO | END GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30
14:20:31 INFO | END GOOD ---- ---- timestamp=1369257630 localtime=May 22 14:20:30
14:20:31 INFO | Got lock of exit_code_file.
14:20:31 INFO | Released lock of exit_code_file and closed it.
OUTPUT: ==============================
OUTPUT: Current time: 2013-05-22 14:20:32.818831 Elapsed: 0:01:30 ETA: Unknown
Done: 0% [ ]
OUTPUT: Thread Status:
RUNNING: 1 ('ttt: LibCBench (1)' 0:01:21)
Machine Status:
Machine Thread Lock Status Checksum
172.17.128.241 ttt: LibCBench (1) True RUNNING 3ba9f2ecbb222f20887daea5583d86ba

OUTPUT: ==============================
14:20:33 INFO | Killing child processes.
14:20:33 INFO | Client complete
14:20:33 INFO | Finished processing control file
14:20:33 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:33 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.

INFO : Test results:
-------------------------------------------------------------------
platform_LibCBench [ PASSED ]
platform_LibCBench/platform_LibCBench [ PASSED ]
platform_LibCBench/platform_LibCBench b_malloc_big1__0_ 0.00375231466667
platform_LibCBench/platform_LibCBench b_malloc_big2__0_ 0.002951359
platform_LibCBench/platform_LibCBench b_malloc_bubble__0_ 0.015066374
platform_LibCBench/platform_LibCBench b_malloc_sparse__0_ 0.015053784
platform_LibCBench/platform_LibCBench b_malloc_thread_local__0_ 0.01138439
platform_LibCBench/platform_LibCBench b_malloc_thread_stress__0_ 0.0367894733333
platform_LibCBench/platform_LibCBench b_malloc_tiny1__0_ 0.000768474333333
platform_LibCBench/platform_LibCBench b_malloc_tiny2__0_ 0.000581407333333
platform_LibCBench/platform_LibCBench b_pthread_create_serial1__0_ 0.0291785246667
platform_LibCBench/platform_LibCBench b_pthread_createjoin_serial1__0_ 0.031907936
platform_LibCBench/platform_LibCBench b_pthread_createjoin_serial2__0_ 0.043485347
platform_LibCBench/platform_LibCBench b_pthread_uselesslock__0_ 0.0294113346667
platform_LibCBench/platform_LibCBench b_regex_compile____a_b_c__d_b__ 0.00529833933333
platform_LibCBench/platform_LibCBench b_regex_search____a_b_c__d_b__ 0.00165455066667
platform_LibCBench/platform_LibCBench b_regex_search___a_25_b__ 0.0496191923333
platform_LibCBench/platform_LibCBench b_stdio_putcgetc__0_ 0.100005711667
platform_LibCBench/platform_LibCBench b_stdio_putcgetc_unlocked__0_ 0.0371443833333
platform_LibCBench/platform_LibCBench b_string_memset__0_ 0.00275405066667
platform_LibCBench/platform_LibCBench b_string_strchr__0_ 0.00456903
platform_LibCBench/platform_LibCBench b_string_strlen__0_ 0.044893587
platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__ 0.118360778
platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__ 0.068957325
platform_LibCBench/platform_LibCBench b_string_strstr___aaaaaaaaaaaaaacccccccccccc__ 0.0135694476667
platform_LibCBench/platform_LibCBench b_string_strstr___abcdefghijklmnopqrstuvwxyz__ 0.0134553343333
platform_LibCBench/platform_LibCBench b_string_strstr___azbycxdwevfugthsirjqkplomn__ 0.0133123556667
platform_LibCBench/platform_LibCBench b_utf8_bigbuf__0_ 0.0473772253333
platform_LibCBench/platform_LibCBench b_utf8_onebyone__0_ 0.130938538333
-------------------------------------------------------------------
Total PASS: 2/2 (100%)

INFO : Elapsed time: 0m16s
"""
| 111 | |
# Canned stderr from the same test_that.sh run; fed to CreateFromRun as the
# 'err' argument.
error = """
ERROR: Identity added: /tmp/test_that.Z4Ld/autotest_key (/tmp/test_that.Z4Ld/autotest_key)
INFO : Using emerged autotests already installed at /build/lumpy/usr/local/autotest.
INFO : Running the following control files 1 times:
INFO : * 'client/site_tests/platform_LibCBench/control'
INFO : Running client test client/site_tests/platform_LibCBench/control
INFO : Test results:
INFO : Elapsed time: 0m18s
"""
| 121 | |
| 122 | |
# Expected keyvals parsed out of OUTPUT above: benchmark name -> value string,
# plus the overall 'PASS' entries for the suite and the test.
keyvals = {'': 'PASS', 'b_stdio_putcgetc__0_': '0.100005711667', 'b_string_strstr___azbycxdwevfugthsirjqkplomn__': '0.0133123556667', 'b_malloc_thread_local__0_': '0.01138439', 'b_string_strlen__0_': '0.044893587', 'b_malloc_sparse__0_': '0.015053784', 'b_string_memset__0_': '0.00275405066667', 'platform_LibCBench': 'PASS', 'b_pthread_uselesslock__0_': '0.0294113346667', 'b_string_strchr__0_': '0.00456903', 'b_pthread_create_serial1__0_': '0.0291785246667', 'b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac__': '0.118360778', 'b_string_strstr___aaaaaaaaaaaaaacccccccccccc__': '0.0135694476667', 'b_pthread_createjoin_serial1__0_': '0.031907936', 'b_malloc_thread_stress__0_': '0.0367894733333', 'b_regex_search____a_b_c__d_b__': '0.00165455066667', 'b_malloc_bubble__0_': '0.015066374', 'b_malloc_big2__0_': '0.002951359', 'b_stdio_putcgetc_unlocked__0_': '0.0371443833333', 'b_pthread_createjoin_serial2__0_': '0.043485347', 'b_regex_search___a_25_b__': '0.0496191923333', 'b_utf8_bigbuf__0_': '0.0473772253333', 'b_malloc_big1__0_': '0.00375231466667', 'b_regex_compile____a_b_c__d_b__': '0.00529833933333', 'b_string_strstr___aaaaaaaaaaaaaaaaaaaaaaaaac__': '0.068957325', 'b_malloc_tiny2__0_': '0.000581407333333', 'b_utf8_onebyone__0_': '0.130938538333', 'b_malloc_tiny1__0_': '0.000768474333333', 'b_string_strstr___abcdefghijklmnopqrstuvwxyz__': '0.0134553343333'}


# Canned path returned by the mocked tempfile.mkdtemp / chroot-path helpers.
TMP_DIR1 = '/tmp/tmpAbcXyz'
| 127 | |
class MockResult(Result):
  """Result stub for unit tests.

  Overrides the filesystem- and results-dir-dependent helpers so that
  Result's output-parsing logic can be exercised without a real autotest
  results directory on disk.
  """

  def __init__(self, logger, label, logging_level):
    super(MockResult, self).__init__(logger, label, logging_level)

  def _GetKeyvals(self, show_all_results):
    # Hand back the canned keyvals that match the OUTPUT log above.
    return keyvals

  def _FindFilesInResultsDir(self, find_args):
    # Pretend no perf.data / report files exist.
    return ''
| 138 | |
| 139 | |
class ResultTest(unittest.TestCase):
  """Unit tests for results_cache.Result."""

  # Shared fixtures: a fake label plus mocked logger and command executer,
  # reused by setUp and the individual test methods below.
  mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
                         'remote', 'image_args', 'cache_dir', False)
  mock_logger = mock.Mock(spec=logger.Logger)
  mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
| 146 | |
  def testCreateFromRun(self):
    """CreateFromRun parses keyvals and both result dirs from OUTPUT/error."""
    result = MockResult.CreateFromRun(logger.GetLogger(), 'average',
                                      self.mock_label,
                                      OUTPUT, error, 0, True, 0)
    self.assertEqual(result.keyvals, keyvals)
    # Inside-chroot dir comes from the 'Results placed in' log line ...
    self.assertEqual(result.chroot_results_dir,
                     '/tmp/test_that.PO1234567/platform_LibCBench')
    # ... and the outside-chroot dir is the same path under /tmp/chroot.
    self.assertEqual(result.results_dir,
                     '/tmp/chroot/tmp/test_that.PO1234567/platform_LibCBench')
    self.assertEqual(result.retval, 0)
| 157 | |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 158 | |
| 159 | |
  def setUp(self):
    # Fresh Result wired to the mocked logger and command executer before
    # each test, so per-test mutations don't leak between cases.
    self.result = Result(self.mock_logger, self.mock_label, 'average',
                         self.mock_cmd_exec)
| 163 | |
  @mock.patch.object(os.path, 'isdir')
  @mock.patch.object(command_executer.CommandExecuter, 'RunCommand')
  @mock.patch.object(command_executer.CommandExecuter, 'CopyFiles')
  def test_copy_files_to(self, mock_copyfiles, mock_runcmd, mock_isdir):
    """_CopyFilesTo copies each file into dest_dir, creating it if needed."""

    files = ['src_file_1', 'src_file_2', 'src_file_3']
    dest_dir = '/tmp/test'
    self.mock_cmd_exec.RunCommand = mock_runcmd
    self.mock_cmd_exec.CopyFiles = mock_copyfiles

    mock_copyfiles.return_value = 0

    # Test 1. dest_dir exists; CopyFiles returns 0 (success).  No mkdir is
    # run, and each source file is copied with a '.0' suffix appended.
    mock_isdir.return_value = True
    self.result._CopyFilesTo(dest_dir, files)
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertEqual(mock_copyfiles.call_count, 3)
    first_args = mock_copyfiles.call_args_list[0][0]
    second_args = mock_copyfiles.call_args_list[1][0]
    third_args = mock_copyfiles.call_args_list[2][0]
    self.assertEqual(first_args, ('src_file_1', '/tmp/test/src_file_1.0'))
    self.assertEqual(second_args, ('src_file_2', '/tmp/test/src_file_2.0'))
    self.assertEqual(third_args, ('src_file_3', '/tmp/test/src_file_3.0'))

    mock_runcmd.reset_mock()
    mock_copyfiles.reset_mock()
    # Test 2. dest_dir does not exist; CopyFiles returns 0.  'mkdir -p' is
    # issued once per file (same command each time) before copying.
    mock_isdir.return_value = False
    self.result._CopyFilesTo(dest_dir, files)
    self.assertEqual(mock_runcmd.call_count, 3)
    self.assertEqual(mock_copyfiles.call_count, 3)
    self.assertEqual(mock_runcmd.call_args_list[0],
                     mock_runcmd.call_args_list[1])
    self.assertEqual(mock_runcmd.call_args_list[0],
                     mock_runcmd.call_args_list[2])
    self.assertEqual(mock_runcmd.call_args_list[0][0], ('mkdir -p /tmp/test',))

    # Test 3. CopyFiles returns 1 (fails) -> _CopyFilesTo must raise.
    mock_copyfiles.return_value = 1
    self.assertRaises(Exception, self.result._CopyFilesTo, dest_dir, files)
| 204 | |
| 205 | |
  @mock.patch.object (Result, '_CopyFilesTo')
  def test_copy_results_to(self, mock_CopyFilesTo):
    """CopyResultsTo forwards perf data and report files to _CopyFilesTo."""
    perf_data_files = ['/tmp/perf.data.0', '/tmp/perf.data.1',
                       '/tmp/perf.data.2']
    perf_report_files = ['/tmp/perf.report.0', '/tmp/perf.report.1',
                         '/tmp/perf.report.2']

    self.result.perf_data_files = perf_data_files
    self.result.perf_report_files = perf_report_files

    self.result._CopyFilesTo = mock_CopyFilesTo
    self.result.CopyResultsTo('/tmp/results/')
    # One call per file group: data files first, then report files.
    self.assertEqual(mock_CopyFilesTo.call_count, 2)
    self.assertEqual(len(mock_CopyFilesTo.call_args_list), 2)
    self.assertEqual(mock_CopyFilesTo.call_args_list[0][0],
                     ('/tmp/results/', perf_data_files))
    self.assertEqual(mock_CopyFilesTo.call_args_list[1][0],
                     ('/tmp/results/', perf_report_files))
| 224 | |
| 225 | |
  def test_get_new_keyvals(self):
    """_GetNewKeyvals parses values and units from a perf_measurements file."""
    kv_dict = {}

    def FakeGetDataMeasurementsFiles():
      # Point at the checked-in sample keyval file next to this test.
      filename = os.path.join(os.getcwd(), 'unittest_keyval_file.txt')
      return [filename]

    self.result._GetDataMeasurementsFiles = FakeGetDataMeasurementsFiles
    kv_dict2, udict = self.result._GetNewKeyvals(kv_dict)
    # First return value: test name -> numeric score.
    self.assertEqual(kv_dict2,
                     {u'Box2D__Box2D': 4775, u'Mandreel__Mandreel': 6620,
                      u'Gameboy__Gameboy': 9901, u'Crypto__Crypto': 8737,
                      u'telemetry_page_measurement_results__num_errored': 0,
                      u'telemetry_page_measurement_results__num_failed': 0,
                      u'PdfJS__PdfJS': 6455, u'Total__Score': 7918,
                      u'EarleyBoyer__EarleyBoyer': 14340,
                      u'MandreelLatency__MandreelLatency': 5188,
                      u'CodeLoad__CodeLoad': 6271, u'DeltaBlue__DeltaBlue': 14401,
                      u'Typescript__Typescript': 9815,
                      u'SplayLatency__SplayLatency': 7653, u'zlib__zlib': 16094,
                      u'Richards__Richards': 10358, u'RegExp__RegExp': 1765,
                      u'NavierStokes__NavierStokes': 9815, u'Splay__Splay': 4425,
                      u'RayTrace__RayTrace': 16600})
    # Second return value: test name -> unit string.
    self.assertEqual(udict,
                     {u'Box2D__Box2D': u'score', u'Mandreel__Mandreel': u'score',
                      u'Gameboy__Gameboy': u'score', u'Crypto__Crypto': u'score',
                      u'telemetry_page_measurement_results__num_errored': u'count',
                      u'telemetry_page_measurement_results__num_failed': u'count',
                      u'PdfJS__PdfJS': u'score', u'Total__Score': u'score',
                      u'EarleyBoyer__EarleyBoyer': u'score',
                      u'MandreelLatency__MandreelLatency': u'score',
                      u'CodeLoad__CodeLoad': u'score',
                      u'DeltaBlue__DeltaBlue': u'score',
                      u'Typescript__Typescript': u'score',
                      u'SplayLatency__SplayLatency': u'score', u'zlib__zlib': u'score',
                      u'Richards__Richards': u'score', u'RegExp__RegExp': u'score',
                      u'NavierStokes__NavierStokes': u'score',
                      u'Splay__Splay': u'score', u'RayTrace__RayTrace': u'score'})
| 264 | |
| 265 | |
  def test_append_telemetry_units(self):
    """_AppendTelemetryUnits zips each value with its unit into [val, unit]."""
    kv_dict = {u'Box2D__Box2D': 4775, u'Mandreel__Mandreel': 6620,
               u'Gameboy__Gameboy': 9901, u'Crypto__Crypto': 8737,
               u'PdfJS__PdfJS': 6455, u'Total__Score': 7918,
               u'EarleyBoyer__EarleyBoyer': 14340,
               u'MandreelLatency__MandreelLatency': 5188,
               u'CodeLoad__CodeLoad': 6271, u'DeltaBlue__DeltaBlue': 14401,
               u'Typescript__Typescript': 9815,
               u'SplayLatency__SplayLatency': 7653, u'zlib__zlib': 16094,
               u'Richards__Richards': 10358, u'RegExp__RegExp': 1765,
               u'NavierStokes__NavierStokes': 9815, u'Splay__Splay': 4425,
               u'RayTrace__RayTrace': 16600}
    units_dict = {u'Box2D__Box2D': u'score', u'Mandreel__Mandreel': u'score',
                  u'Gameboy__Gameboy': u'score', u'Crypto__Crypto': u'score',
                  u'PdfJS__PdfJS': u'score', u'Total__Score': u'score',
                  u'EarleyBoyer__EarleyBoyer': u'score',
                  u'MandreelLatency__MandreelLatency': u'score',
                  u'CodeLoad__CodeLoad': u'score',
                  u'DeltaBlue__DeltaBlue': u'score',
                  u'Typescript__Typescript': u'score',
                  u'SplayLatency__SplayLatency': u'score',
                  u'zlib__zlib': u'score',
                  u'Richards__Richards': u'score', u'RegExp__RegExp': u'score',
                  u'NavierStokes__NavierStokes': u'score',
                  u'Splay__Splay': u'score', u'RayTrace__RayTrace': u'score'}

    # Every key present in both inputs should come back as [value, unit].
    results_dict = self.result._AppendTelemetryUnits(kv_dict, units_dict)
    self.assertEqual(results_dict,
                     {u'Box2D__Box2D': [4775, u'score'],
                      u'Splay__Splay': [4425, u'score'],
                      u'Gameboy__Gameboy': [9901, u'score'],
                      u'Crypto__Crypto': [8737, u'score'],
                      u'PdfJS__PdfJS': [6455, u'score'],
                      u'Total__Score': [7918, u'score'],
                      u'EarleyBoyer__EarleyBoyer': [14340, u'score'],
                      u'MandreelLatency__MandreelLatency': [5188, u'score'],
                      u'DeltaBlue__DeltaBlue': [14401, u'score'],
                      u'SplayLatency__SplayLatency': [7653, u'score'],
                      u'Mandreel__Mandreel': [6620, u'score'],
                      u'Richards__Richards': [10358, u'score'],
                      u'zlib__zlib': [16094, u'score'],
                      u'CodeLoad__CodeLoad': [6271, u'score'],
                      u'Typescript__Typescript': [9815, u'score'],
                      u'RegExp__RegExp': [1765, u'score'],
                      u'RayTrace__RayTrace': [16600, u'score'],
                      u'NavierStokes__NavierStokes': [9815, u'score']})
| 312 | |
| 313 | |
  @mock.patch.object (misc, 'GetInsideChrootPath')
  @mock.patch.object (tempfile, 'mkdtemp')
  @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
  @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
  def test_get_keyvals(self, mock_chrootruncmd, mock_runcmd, mock_mkdtemp,
                       mock_getpath):
    """_GetKeyvals runs generate_test_report in the chroot and parses it."""

    self.kv_dict = {}
    self.call_GetNewKeyvals = False

    def reset():
      # Clear recorded state and all mock call counters between sub-cases.
      self.kv_dict = {}
      self.call_GetNewKeyvals = False
      mock_chrootruncmd.reset_mock()
      mock_runcmd.reset_mock()
      mock_mkdtemp.reset_mock()
      mock_getpath.reset_mock()

    def FakeGetNewKeyvals(kv_dict):
      # Record the kv_dict that _GetKeyvals built from the report output,
      # then hand back canned keyvals/units.
      self.kv_dict = kv_dict
      self.call_GetNewKeyvals = True
      return_kvdict = { 'first_time' : 680, 'Total' : 10}
      return_udict = { 'first_time' : 'ms', 'Total' : 'score'}
      return return_kvdict, return_udict


    mock_mkdtemp.return_value = TMP_DIR1
    # ChrootRunCommand returns (retval, stdout, stderr); stdout is the CSV
    # report with one 'path,status' line per test.
    mock_chrootruncmd.return_value = ['',
                                      ('%s,PASS\n%s/telemetry_Crosperf,PASS\n')
                                      % (TMP_DIR1, TMP_DIR1),
                                      '']
    mock_getpath.return_value = TMP_DIR1
    self.result._ce.ChrootRunCommand = mock_chrootruncmd
    self.result._ce.RunCommand = mock_runcmd
    self.result._GetNewKeyvals = FakeGetNewKeyvals
    self.result.suite = 'telemetry_Crosperf'
    self.result.results_dir = '/tmp/test_that_resultsNmq'

    # Test 1. no self._temp_dir: a temp dir is created and the results are
    # copied into it before generate_test_report runs.
    res = self.result._GetKeyvals(True)
    self.assertTrue(self.call_GetNewKeyvals)
    self.assertEqual(self.kv_dict, { '': 'PASS', 'telemetry_Crosperf': 'PASS' })
    self.assertEqual(mock_runcmd.call_count, 1)
    self.assertEqual(mock_runcmd.call_args_list[0][0],
                     ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1,))
    self.assertEqual(mock_chrootruncmd.call_count, 1)
    self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
                     ('/tmp',
                      ('python generate_test_report --no-color --csv %s') %
                      TMP_DIR1))
    self.assertEqual(mock_getpath.call_count, 1)
    self.assertEqual(mock_mkdtemp.call_count, 1)
    self.assertEqual(res, {'Total': [10, 'score'], 'first_time': [680, 'ms']})


    # Test 2. self._temp_dir already set: no mkdtemp and no copy.
    reset()
    mock_chrootruncmd.return_value = ['',
                                      ('/tmp/tmpJCajRG,PASS\n/tmp/tmpJCajRG/'
                                       'telemetry_Crosperf,PASS\n'),
                                      '']
    mock_getpath.return_value = '/tmp/tmpJCajRG'
    self.result._temp_dir = '/tmp/tmpJCajRG'
    res = self.result._GetKeyvals(True)
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertEqual(mock_mkdtemp.call_count, 0)
    self.assertEqual(mock_chrootruncmd.call_count, 1)
    self.assertTrue(self.call_GetNewKeyvals)
    self.assertEqual(self.kv_dict, { '': 'PASS', 'telemetry_Crosperf': 'PASS' })
    self.assertEqual(res, {'Total': [10, 'score'], 'first_time': [680, 'ms']})

    # Test 3. suite != telemetry_Crosperf. Normally this would be for
    # running non-Telemetry autotests, such as BootPerfServer. In this test
    # case, the keyvals we have set up were returned from a Telemetry test run;
    # so this pass is basically testing that we don't append the units to the
    # test results (which we do for Telemetry autotest runs).
    reset()
    self.result.suite = ''
    res = self.result._GetKeyvals(True)
    self.assertEqual(res, {'Total': 10, 'first_time': 680 })
| 394 | |
| 395 | |
| 396 | def test_get_results_dir(self): |
| 397 | |
| 398 | self.result.out = '' |
| 399 | self.assertRaises(Exception, self.result._GetResultsDir) |
| 400 | |
| 401 | self.result.out = OUTPUT |
| 402 | resdir = self.result._GetResultsDir() |
| 403 | self.assertEqual(resdir, |
cmtice | d96e457 | 2015-05-19 16:19:25 -0700 | [diff] [blame] | 404 | '/tmp/test_that.PO1234567/platform_LibCBench') |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 405 | |
| 406 | |
  @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
  def test_find_files_in_results_dir(self, mock_runcmd):
    """_FindFilesInResultsDir shells out to 'find' and returns its stdout."""

    # No results dir at all -> nothing to search, returns None.
    self.result.results_dir = None
    res = self.result._FindFilesInResultsDir('-name perf.data')
    self.assertIsNone(res)

    # Successful find: the exact command line matters, and stdout is
    # returned verbatim.
    self.result._ce.RunCommand = mock_runcmd
    self.result.results_dir = '/tmp/test_results'
    mock_runcmd.return_value = [0, '/tmp/test_results/perf.data', '']
    res = self.result._FindFilesInResultsDir('-name perf.data')
    self.assertEqual(mock_runcmd.call_count, 1)
    self.assertEqual(mock_runcmd.call_args_list[0][0],
                     ('find /tmp/test_results -name perf.data',))
    self.assertEqual(res, '/tmp/test_results/perf.data')

    # A non-zero find exit status must be surfaced as an exception.
    mock_runcmd.reset_mock()
    mock_runcmd.return_value = [1, '', '']
    self.assertRaises(Exception, self.result._FindFilesInResultsDir,
                      '-name perf.data')
| 427 | |
| 428 | |
| 429 | |
| 430 | @mock.patch.object (Result, '_FindFilesInResultsDir') |
| 431 | def test_get_perf_data_files(self, mock_findfiles): |
| 432 | self.args = None |
| 433 | |
| 434 | mock_findfiles.return_value = 'line1\nline1\n' |
| 435 | self.result._FindFilesInResultsDir = mock_findfiles |
| 436 | res = self.result._GetPerfDataFiles() |
| 437 | self.assertEqual(res, ['line1', 'line1']) |
| 438 | self.assertEqual(mock_findfiles.call_args_list[0][0], ('-name perf.data',)) |
| 439 | |
| 440 | |
| 441 | def test_get_perf_report_files(self): |
| 442 | self.args = None |
| 443 | |
| 444 | def FakeFindFiles(find_args): |
| 445 | self.args = find_args |
| 446 | return 'line1\nline1\n' |
| 447 | |
| 448 | self.result._FindFilesInResultsDir = FakeFindFiles |
| 449 | res = self.result._GetPerfReportFiles() |
| 450 | self.assertEqual(res, ['line1', 'line1']) |
| 451 | self.assertEqual(self.args, '-name perf.data.report') |
| 452 | |
| 453 | |
| 454 | def test_get_data_measurement_files(self): |
| 455 | self.args = None |
| 456 | |
| 457 | def FakeFindFiles(find_args): |
| 458 | self.args = find_args |
| 459 | return 'line1\nline1\n' |
| 460 | |
| 461 | self.result._FindFilesInResultsDir = FakeFindFiles |
| 462 | res = self.result._GetDataMeasurementsFiles() |
| 463 | self.assertEqual(res, ['line1', 'line1']) |
| 464 | self.assertEqual(self.args, '-name perf_measurements') |
| 465 | |
| 466 | |
  @mock.patch.object (misc, 'GetInsideChrootPath')
  @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand')
  def test_generate_perf_report_files(self, mock_chrootruncmd, mock_getpath):
    """_GeneratePerfReportFiles builds one perf-report command per data file."""
    fake_file = '/usr/chromeos/chroot/tmp/results/fake_file'
    self.result.perf_data_files = ['/tmp/results/perf.data']
    self.result._board = 'lumpy'
    mock_getpath.return_value = fake_file
    self.result._ce.ChrootRunCommand = mock_chrootruncmd
    tmp = self.result._GeneratePerfReportFiles()
    # Report paths come back as outside-chroot paths under /tmp/chroot.
    self.assertEqual(tmp, ['/tmp/chroot%s' % fake_file])
    # The exact perf.static command line (symfs/vmlinux/kallsyms for the
    # board) must be run inside the chroot.
    self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
                     ('/tmp',
                      ('/tmp/perf.static report -n --symfs /build/lumpy '
                       '--vmlinux /build/lumpy/usr/lib/debug/boot/vmlinux '
                       '--kallsyms /build/lumpy/boot/System.map-* -i '
                       '%s --stdio > %s') % (fake_file, fake_file)))
| 483 | |
| 484 | |
| 485 | |
| 486 | @mock.patch.object (misc, 'GetOutsideChrootPath') |
| 487 | def test_populate_from_run(self, mock_getpath): |
| 488 | |
| 489 | def FakeGetResultsDir(): |
| 490 | self.callGetResultsDir = True |
| 491 | return '/tmp/results_dir' |
| 492 | |
| 493 | def FakeGetPerfDataFiles(): |
| 494 | self.callGetPerfDataFiles = True |
| 495 | return [] |
| 496 | |
| 497 | def FakeGetPerfReportFiles(): |
| 498 | self.callGetPerfReportFiles = True |
| 499 | return [] |
| 500 | |
| 501 | def FakeProcessResults(show_results): |
| 502 | self.callProcessResults = True |
| 503 | |
| 504 | mock.get_path = '/tmp/chromeos/tmp/results_dir' |
| 505 | self.result._chromeos_root = '/tmp/chromeos' |
| 506 | |
| 507 | self.callGetResultsDir = False |
| 508 | self.callGetPerfDataFiles = False |
| 509 | self.callGetPerfReportFiles = False |
| 510 | self.callProcessResults = False |
| 511 | |
| 512 | self.result._GetResultsDir = FakeGetResultsDir |
| 513 | self.result._GetPerfDataFiles = FakeGetPerfDataFiles |
| 514 | self.result._GeneratePerfReportFiles = FakeGetPerfReportFiles |
| 515 | self.result._ProcessResults = FakeProcessResults |
| 516 | |
| 517 | self.result._PopulateFromRun(OUTPUT, '', 0, True, 'test', |
| 518 | 'telemetry_Crosperf') |
| 519 | self.assertTrue(self.callGetResultsDir) |
| 520 | self.assertTrue(self.callGetPerfDataFiles) |
| 521 | self.assertTrue(self.callGetPerfReportFiles) |
| 522 | self.assertTrue(self.callProcessResults) |
| 523 | |
| 524 | def test_process_results(self): |
| 525 | |
| 526 | def FakeGetKeyvals(show_all): |
| 527 | if show_all: |
| 528 | return { 'first_time' : 680, 'Total' : 10} |
| 529 | else: |
| 530 | return { 'Total' : 10} |
| 531 | |
| 532 | def FakeGatherPerfResults(): |
| 533 | self.callGatherPerfResults = True |
| 534 | |
| 535 | self.callGatherPerfResults = False |
| 536 | |
| 537 | self.result._GetKeyvals = FakeGetKeyvals |
| 538 | self.result._GatherPerfResults = FakeGatherPerfResults |
| 539 | |
| 540 | self.result.retval = 0 |
| 541 | self.result._ProcessResults(True) |
| 542 | self.assertTrue(self.callGatherPerfResults) |
| 543 | self.assertEqual(len(self.result.keyvals), 3) |
| 544 | self.assertEqual(self.result.keyvals, |
| 545 | { 'first_time' : 680, 'Total' : 10, 'retval' : 0 }) |
| 546 | |
| 547 | self.result.retval = 1 |
| 548 | self.result._ProcessResults(False) |
| 549 | self.assertEqual(len(self.result.keyvals), 2) |
| 550 | self.assertEqual(self.result.keyvals, |
| 551 | { 'Total' : 10, 'retval' : 1 }) |
| 552 | |
| 553 | |
| 554 | @mock.patch.object (misc, 'GetInsideChrootPath') |
| 555 | @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand') |
| 556 | def test_populate_from_cache_dir(self, mock_runchrootcmd, mock_getpath): |
| 557 | |
| 558 | def FakeMkdtemp(dir=''): |
| 559 | return self.tmpdir |
| 560 | |
| 561 | current_path = os.getcwd() |
| 562 | cache_dir = os.path.join(current_path, 'test_cache/test_input') |
| 563 | self.result._ce = command_executer.GetCommandExecuter(log_level='average') |
| 564 | self.result._ce.ChrootRunCommand = mock_runchrootcmd |
| 565 | mock_runchrootcmd.return_value = ['', |
| 566 | ('%s,PASS\n%s/\telemetry_Crosperf,PASS\n') |
| 567 | % (TMP_DIR1, TMP_DIR1), |
| 568 | ''] |
| 569 | mock_getpath.return_value = TMP_DIR1 |
| 570 | self.tmpdir = tempfile.mkdtemp() |
| 571 | save_real_mkdtemp = tempfile.mkdtemp |
| 572 | tempfile.mkdtemp = FakeMkdtemp |
| 573 | |
| 574 | self.result._PopulateFromCacheDir(cache_dir, True, 'sunspider', |
| 575 | 'telemetry_Crosperf') |
| 576 | self.assertEqual(self.result.keyvals, |
| 577 | {u'Total__Total': [444.0, u'ms'], |
| 578 | u'regexp-dna__regexp-dna': [16.2, u'ms'], |
| 579 | u'telemetry_page_measurement_results__num_failed': |
| 580 | [0, u'count'], |
| 581 | u'telemetry_page_measurement_results__num_errored': |
| 582 | [0, u'count'], |
| 583 | u'string-fasta__string-fasta': [23.2, u'ms'], |
| 584 | u'crypto-sha1__crypto-sha1': [11.6, u'ms'], |
| 585 | u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': |
| 586 | [3.2, u'ms'], |
| 587 | u'access-nsieve__access-nsieve': [7.9, u'ms'], |
| 588 | u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'], |
| 589 | u'string-validate-input__string-validate-input': |
| 590 | [19.3, u'ms'], |
| 591 | u'3d-raytrace__3d-raytrace': [24.7, u'ms'], |
| 592 | u'3d-cube__3d-cube': [28.0, u'ms'], |
| 593 | u'string-unpack-code__string-unpack-code': [46.7, u'ms'], |
| 594 | u'date-format-tofte__date-format-tofte': [26.3, u'ms'], |
| 595 | u'math-partial-sums__math-partial-sums': [22.0, u'ms'], |
| 596 | '\telemetry_Crosperf': ['PASS', ''], |
| 597 | u'crypto-aes__crypto-aes': [15.2, u'ms'], |
| 598 | u'bitops-bitwise-and__bitops-bitwise-and': [8.4, u'ms'], |
| 599 | u'crypto-md5__crypto-md5': [10.5, u'ms'], |
| 600 | u'string-tagcloud__string-tagcloud': [52.8, u'ms'], |
| 601 | u'access-nbody__access-nbody': [8.5, u'ms'], |
| 602 | 'retval': 0, |
| 603 | u'math-spectral-norm__math-spectral-norm': [6.6, u'ms'], |
| 604 | u'math-cordic__math-cordic': [8.7, u'ms'], |
| 605 | u'access-binary-trees__access-binary-trees': [4.5, u'ms'], |
| 606 | u'controlflow-recursive__controlflow-recursive': |
| 607 | [4.4, u'ms'], |
| 608 | u'access-fannkuch__access-fannkuch': [17.8, u'ms'], |
| 609 | u'string-base64__string-base64': [16.0, u'ms'], |
| 610 | u'date-format-xparb__date-format-xparb': [20.9, u'ms'], |
| 611 | u'3d-morph__3d-morph': [22.1, u'ms'], |
| 612 | u'bitops-bits-in-byte__bitops-bits-in-byte': [9.1, u'ms'] |
| 613 | }) |
| 614 | |
| 615 | |
| 616 | # Clean up after test. |
| 617 | tempfile.mkdtemp = save_real_mkdtemp |
| 618 | command = 'rm -Rf %s' % self.tmpdir |
| 619 | self.result._ce.RunCommand(command) |
| 620 | |
| 621 | |
| 622 | @mock.patch.object (misc, 'GetRoot') |
| 623 | @mock.patch.object (command_executer.CommandExecuter, 'RunCommand') |
| 624 | def test_cleanup(self, mock_runcmd, mock_getroot): |
| 625 | |
| 626 | # Test 1. 'rm_chroot_tmp' is True; self.results_dir exists; |
| 627 | # self._temp_dir exists; results_dir name contains 'test_that_results_'. |
| 628 | mock_getroot.return_value = ['/tmp/tmp_AbcXyz', 'test_that_results_fake'] |
| 629 | self.result._ce.RunCommand = mock_runcmd |
| 630 | self.result.results_dir = 'test_results_dir' |
| 631 | self.result._temp_dir = 'test_temp_dir' |
| 632 | self.result.CleanUp(True) |
| 633 | self.assertEqual(mock_getroot.call_count, 1) |
| 634 | self.assertEqual(mock_runcmd.call_count, 2) |
| 635 | self.assertEqual(mock_runcmd.call_args_list[0][0], |
| 636 | ('rm -rf test_results_dir',)) |
| 637 | self.assertEqual(mock_runcmd.call_args_list[1][0], |
| 638 | ('rm -rf test_temp_dir',)) |
| 639 | |
| 640 | # Test 2. Same, except ath results_dir name does not contain |
| 641 | # 'test_that_results_' |
| 642 | mock_getroot.reset_mock() |
| 643 | mock_runcmd.reset_mock() |
| 644 | mock_getroot.return_value = ['/tmp/tmp_AbcXyz', 'other_results_fake'] |
| 645 | self.result._ce.RunCommand = mock_runcmd |
| 646 | self.result.results_dir = 'test_results_dir' |
| 647 | self.result._temp_dir = 'test_temp_dir' |
| 648 | self.result.CleanUp(True) |
| 649 | self.assertEqual(mock_getroot.call_count, 1) |
| 650 | self.assertEqual(mock_runcmd.call_count, 2) |
| 651 | self.assertEqual(mock_runcmd.call_args_list[0][0], |
| 652 | ('rm -rf /tmp/tmp_AbcXyz',)) |
| 653 | self.assertEqual(mock_runcmd.call_args_list[1][0], |
| 654 | ('rm -rf test_temp_dir',)) |
| 655 | |
| 656 | # Test 3. mock_getroot returns nothing; 'rm_chroot_tmp' is False. |
| 657 | mock_getroot.reset_mock() |
| 658 | mock_runcmd.reset_mock() |
| 659 | self.result.CleanUp(False) |
| 660 | self.assertEqual(mock_getroot.call_count, 0) |
| 661 | self.assertEqual(mock_runcmd.call_count, 1) |
| 662 | self.assertEqual(mock_runcmd.call_args_list[0][0], |
| 663 | ('rm -rf test_temp_dir',)) |
| 664 | |
| 665 | # Test 4. 'rm_chroot_tmp' is True, but result_dir & _temp_dir are None. |
| 666 | mock_getroot.reset_mock() |
| 667 | mock_runcmd.reset_mock() |
| 668 | self.result.results_dir = None |
| 669 | self.result._temp_dir = None |
| 670 | self.result.CleanUp(True) |
| 671 | self.assertEqual(mock_getroot.call_count, 0) |
| 672 | self.assertEqual(mock_runcmd.call_count, 0) |
| 673 | |
| 674 | |
| 675 | @mock.patch.object (misc, 'GetInsideChrootPath') |
| 676 | @mock.patch.object (command_executer.CommandExecuter, 'ChrootRunCommand') |
| 677 | def test_store_to_cache_dir(self, mock_chrootruncmd, mock_getpath): |
| 678 | |
| 679 | def FakeMkdtemp(dir=''): |
| 680 | return self.tmpdir |
| 681 | |
| 682 | current_path = os.getcwd() |
| 683 | cache_dir = os.path.join(current_path, 'test_cache/test_output') |
| 684 | |
| 685 | self.result._ce = command_executer.GetCommandExecuter(log_level='average') |
| 686 | self.result.out = OUTPUT |
| 687 | self.result.err = error |
| 688 | self.result.retval = 0 |
| 689 | self.tmpdir = tempfile.mkdtemp() |
| 690 | if not os.path.exists(self.tmpdir): |
| 691 | os.makedirs(self.tmpdir) |
| 692 | self.result.results_dir = os.path.join(os.getcwd(), 'test_cache') |
| 693 | save_real_mkdtemp = tempfile.mkdtemp |
| 694 | tempfile.mkdtemp = FakeMkdtemp |
| 695 | |
| 696 | mock_mm = machine_manager.MockMachineManager('/tmp/chromeos_root', 0, |
cmtice | d96e457 | 2015-05-19 16:19:25 -0700 | [diff] [blame] | 697 | 'average', '') |
cmtice | c490e07 | 2014-06-13 15:38:45 -0700 | [diff] [blame] | 698 | mock_mm.machine_checksum_string['mock_label'] = 'fake_machine_checksum123' |
| 699 | |
| 700 | self.result.StoreToCacheDir(cache_dir, mock_mm) |
| 701 | |
| 702 | # Check that the correct things were written to the 'cache'. |
| 703 | test_dir = os.path.join(os.getcwd(), 'test_cache/test_output') |
| 704 | base_dir = os.path.join(os.getcwd(), 'test_cache/compare_output') |
| 705 | self.assertTrue(os.path.exists(os.path.join(test_dir, 'autotest.tbz2'))) |
| 706 | self.assertTrue(os.path.exists(os.path.join(test_dir, 'machine.txt'))) |
| 707 | self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.txt'))) |
| 708 | |
| 709 | f1 = os.path.join(test_dir, 'machine.txt') |
| 710 | f2 = os.path.join(base_dir, 'machine.txt') |
| 711 | cmd = 'diff %s %s' % (f1, f2) |
| 712 | [_, out, _] = self.result._ce.RunCommand(cmd, return_output=True) |
| 713 | self.assertEqual(len(out), 0) |
| 714 | |
| 715 | f1 = os.path.join(test_dir, 'results.txt') |
| 716 | f2 = os.path.join(base_dir, 'results.txt') |
| 717 | cmd = 'diff %s %s' % (f1, f2) |
| 718 | [_, out, _] = self.result._ce.RunCommand(cmd, return_output=True) |
| 719 | self.assertEqual(len(out), 0) |
| 720 | |
| 721 | # Clean up after test. |
| 722 | tempfile.mkdtemp = save_real_mkdtemp |
| 723 | command = 'rm %s/*' % test_dir |
| 724 | self.result._ce.RunCommand(command) |
| 725 | |
| 726 | |
# Expected keyvals parsed from the pure-telemetry sunspider CSV in
# PURE_TELEMETRY_OUTPUT: keys are '<page url> <metric> (ms)' strings and
# values are the string-formatted timings, plus the integer 'retval'.
TELEMETRY_RESULT_KEYVALS = {'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-cordic (ms)': '11.4', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-nbody (ms)': '6.9', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-fannkuch (ms)': '26.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-spectral-norm (ms)': '6.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-nsieve-bits (ms)': '9.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html math-partial-sums (ms)': '32.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html regexp-dna (ms)': '16.1', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-cube (ms)': '42.7', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-md5 (ms)': '10.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-sha1 (ms)': '12.4', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-tagcloud (ms)': '47.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-fasta (ms)': '36.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-binary-trees (ms)': '7.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html date-format-xparb (ms)': '138.1', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html crypto-aes (ms)': '19.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html Total (ms)': '656.5', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-base64 (ms)': '17.5', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-validate-input (ms)': '24.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-raytrace (ms)': '28.7', 
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html controlflow-recursive (ms)': '5.3', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-bits-in-byte (ms)': '9.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html 3d-morph (ms)': '50.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-bitwise-and (ms)': '8.8', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html access-nsieve (ms)': '8.6', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html date-format-tofte (ms)': '31.2', 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html bitops-3bit-bits-in-byte (ms)': '3.5', 'retval': 0, 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html string-unpack-code (ms)': '45.0'}
| 728 | |
# Raw CSV (CRLF line endings) as produced by a pure-telemetry sunspider run:
# a header row of metric names followed by one row of timings per page.
PURE_TELEMETRY_OUTPUT = """page_name,3d-cube (ms),3d-morph (ms),3d-raytrace (ms),Total (ms),access-binary-trees (ms),access-fannkuch (ms),access-nbody (ms),access-nsieve (ms),bitops-3bit-bits-in-byte (ms),bitops-bits-in-byte (ms),bitops-bitwise-and (ms),bitops-nsieve-bits (ms),controlflow-recursive (ms),crypto-aes (ms),crypto-md5 (ms),crypto-sha1 (ms),date-format-tofte (ms),date-format-xparb (ms),math-cordic (ms),math-partial-sums (ms),math-spectral-norm (ms),regexp-dna (ms),string-base64 (ms),string-fasta (ms),string-tagcloud (ms),string-unpack-code (ms),string-validate-input (ms)\r\nhttp://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html,42.7,50.2,28.7,656.5,7.3,26.3,6.9,8.6,3.5,9.8,8.8,9.3,5.3,19.2,10.8,12.4,31.2,138.1,11.4,32.8,6.3,16.1,17.5,36.3,47.2,45.0,24.8\r\n"""
| 730 | |
class TelemetryResultTest(unittest.TestCase):
  """Tests for TelemetryResult: populating from a live run and from cache."""

  mock_logger = mock.Mock(spec=logger.Logger)
  mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
  mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
                         'remote', 'image_args', 'cache_dir', False)

  def test_populate_from_run(self):
    """_PopulateFromRun stores out/err/retval and triggers result processing."""

    def fake_process_results():
      self.callFakeProcessResults = True

    self.callFakeProcessResults = False
    self.result = TelemetryResult(self.mock_logger, self.mock_label,
                                  'average', self.mock_cmd_exec)
    self.result._ProcessResults = fake_process_results
    self.result._PopulateFromRun(OUTPUT, error, 3, False, 'fake_test',
                                 'telemetry_Crosperf')
    self.assertTrue(self.callFakeProcessResults)
    # The raw run artifacts must be captured verbatim.
    for attr, expected in (('out', OUTPUT), ('err', error), ('retval', 3)):
      self.assertEqual(getattr(self.result, attr), expected)

  def test_populate_from_cache_dir_and_process_results(self):
    """A cached pure-telemetry run parses back into the expected keyvals."""
    self.result = TelemetryResult(self.mock_logger, self.mock_label,
                                  'average')
    cache_dir = os.path.join(os.getcwd(),
                             'test_cache/test_puretelemetry_input')
    self.result._PopulateFromCacheDir(cache_dir)
    self.assertEqual(self.result.out, PURE_TELEMETRY_OUTPUT)
    self.assertEqual(self.result.err, '')
    self.assertEqual(self.result.retval, 0)
    self.assertEqual(self.result.keyvals, TELEMETRY_RESULT_KEYVALS)
| 767 | |
| 768 | |
class ResultsCacheTest(unittest.TestCase):
  """Tests for ResultsCache: cache-directory naming and cache reads.

  The mock logger/label below are class attributes and therefore shared by
  every test method; setUp builds a fresh ResultsCache per test.
  """

  mock_logger = mock.Mock(spec=logger.Logger)
  mock_label = MockLabel('mock_label', 'chromeos_image', '/tmp', 'lumpy',
                         'remote', 'image_args', 'cache_dir', False)
  def setUp(self):
    # Wire a fresh ResultsCache to a mock machine manager so each test
    # starts from identical Init() arguments.
    self.results_cache = ResultsCache()


    mock_mm = machine_manager.MockMachineManager('/tmp/chromeos_root', 0,
                                                 'average', '')
    mock_mm.machine_checksum_string['mock_label'] = 'fake_machine_checksum123'

    self.results_cache.Init(self.mock_label.chromeos_image,
                            self.mock_label.chromeos_root,
                            'sunspider',
                            1, # benchmark_run.iteration,
                            '', # benchmark_run.test_args,
                            '', # benchmark_run.profiler_args,
                            mock_mm,
                            self.mock_label.board,
                            [CacheConditions.CACHE_FILE_EXISTS,
                             CacheConditions.CHECKSUMS_MATCH],
                            self.mock_logger,
                            'average',
                            self.mock_label,
                            '', # benchmark_run.share_cache
                            'telemetry_Crosperf',
                            True, # benchmark_run.show_all_results
                            False) # benchmark_run.run_local


  @mock.patch.object (image_checksummer.ImageChecksummer, 'Checksum')
  def test_get_cache_dir_for_write(self, mock_checksum):
    """_GetCacheDirForWrite builds the cache path from label/machines."""

    def FakeGetMachines(label):
      # Pretend the machine manager controls two lumpy machines.
      m1 = machine_manager.MockCrosMachine('lumpy1.cros',
                                           self.results_cache.chromeos_root,
                                           'average')
      m2 = machine_manager.MockCrosMachine('lumpy2.cros',
                                           self.results_cache.chromeos_root,
                                           'average')
      return [m1, m2]


    mock_checksum.return_value = 'FakeImageChecksumabc123'
    self.results_cache.machine_manager.GetMachines = FakeGetMachines
    self.results_cache.machine_manager.machine_checksum['mock_label'] = \
        'FakeMachineChecksumabc987'
    # Based on the label, benchmark and machines, get the directory in which
    # to store the cache information for this test run.
    result_path = self.results_cache._GetCacheDirForWrite()
    # Verify that the returned directory is correct (since the label
    # contained a cache_dir, named 'cache_dir', that's what is expected in
    # the result, rather than '~/cros_scratch').
    comp_path = os.path.join(os.getcwd(),
                             'cache_dir/54524606abaae4fdf7b02f49f7ae7127_'
                             'sunspider_1_7215ee9c7d9dc229d2921a40e899ec5f_'
                             'FakeImageChecksumabc123_FakeMachineChecksum'
                             'abc987__6')
    self.assertEqual(result_path, comp_path)


  def test_form_cache_dir(self):
    """_FormCacheDir globs for directories matching the cache-key list."""
    # This is very similar to the previous test (_FormCacheDir is called
    # from _GetCacheDirForWrite).
    cache_key_list = ('54524606abaae4fdf7b02f49f7ae7127', 'sunspider', '1',
                      '7215ee9c7d9dc229d2921a40e899ec5f',
                      'FakeImageChecksumabc123', '*', '*', '6')
    path = self.results_cache._FormCacheDir(cache_key_list)
    self.assertEqual(len(path), 1)
    path1 = path[0]
    test_dirname = ('54524606abaae4fdf7b02f49f7ae7127_sunspider_1_7215ee9'
                    'c7d9dc229d2921a40e899ec5f_FakeImageChecksumabc123_*_*_6')
    comp_path = os.path.join(os.getcwd(), 'cache_dir', test_dirname)
    self.assertEqual(path1, comp_path)


  @mock.patch.object (image_checksummer.ImageChecksummer, 'Checksum')
  def test_get_cache_key_list(self, mock_checksum):
    """_GetCacheKeyList varies its pieces with read/write and image type."""
    # This tests the mechanism that generates the various pieces of the
    # cache directory name, based on various conditions.

    def FakeGetMachines(label):
      # Pretend the machine manager controls two lumpy machines.
      m1 = machine_manager.MockCrosMachine('lumpy1.cros',
                                           self.results_cache.chromeos_root,
                                           'average')
      m2 = machine_manager.MockCrosMachine('lumpy2.cros',
                                           self.results_cache.chromeos_root,
                                           'average')
      return [m1, m2]


    mock_checksum.return_value = 'FakeImageChecksumabc123'
    self.results_cache.machine_manager.GetMachines = FakeGetMachines
    self.results_cache.machine_manager.machine_checksum['mock_label'] = \
        'FakeMachineChecksumabc987'

    # Test 1. Generating cache name for reading (not writing).
    key_list = self.results_cache._GetCacheKeyList(True)
    self.assertEqual(key_list[0], '*') # Machine checksum value, for read.
    self.assertEqual(key_list[1], 'sunspider')
    self.assertEqual(key_list[2], '1')
    self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
    self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
    self.assertEqual(key_list[5], '*')
    self.assertEqual(key_list[6], '*')
    self.assertEqual(key_list[7], '6')

    # Test 2. Generating cache name for writing, with local image type.
    key_list = self.results_cache._GetCacheKeyList(False)
    self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
    self.assertEqual(key_list[1], 'sunspider')
    self.assertEqual(key_list[2], '1')
    self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
    self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
    self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
    self.assertEqual(key_list[6], '')
    self.assertEqual(key_list[7], '6')

    # Test 3. Generating cache name for writing, with trybot image type.
    self.results_cache.label.image_type = 'trybot'
    key_list = self.results_cache._GetCacheKeyList(False)
    self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
    self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
    self.assertEqual(key_list[4], '54524606abaae4fdf7b02f49f7ae7127')
    self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')

    # Test 4. Generating cache name for writing, with official image type.
    self.results_cache.label.image_type = 'official'
    key_list = self.results_cache._GetCacheKeyList(False)
    self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
    self.assertEqual(key_list[1], 'sunspider')
    self.assertEqual(key_list[2], '1')
    self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
    self.assertEqual(key_list[4], '*')
    self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')
    self.assertEqual(key_list[6], '')
    self.assertEqual(key_list[7], '6')

    # Test 5. Generating cache name for writing, with local image type, and
    # specifying that the image path must match the cached image path.
    self.results_cache.label.image_type = 'local'
    self.results_cache.cache_conditions.append(CacheConditions.IMAGE_PATH_MATCH)
    key_list = self.results_cache._GetCacheKeyList(False)
    self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
    self.assertEqual(key_list[3], '7215ee9c7d9dc229d2921a40e899ec5f')
    self.assertEqual(key_list[4], 'FakeImageChecksumabc123')
    self.assertEqual(key_list[5], 'FakeMachineChecksumabc987')


  @mock.patch.object (command_executer.CommandExecuter, 'RunCommand')
  @mock.patch.object (os.path, 'isdir')
  @mock.patch.object (Result, 'CreateFromCacheHit')
  def test_read_result(self, mock_create, mock_isdir, mock_runcmd):
    """ReadResult honors cache conditions, dir lookup and hit creation."""

    self.fakeCacheReturnResult = None
    def FakeGetCacheDirForRead():
      return self.fakeCacheReturnResult

    def FakeGetCacheDirForWrite():
      return self.fakeCacheReturnResult

    mock_cmd_exec = mock.Mock(spec=command_executer.CommandExecuter)
    fake_result = Result(self.mock_logger, self.mock_label, 'average',
                         mock_cmd_exec)
    fake_result.retval = 0

    # Set up results_cache _GetCacheDirFor{Read,Write} to return
    # self.fakeCacheReturnResult, which is initially None (see above).
    # So initially, no cache dir is returned.
    self.results_cache._GetCacheDirForRead = FakeGetCacheDirForRead
    self.results_cache._GetCacheDirForWrite = FakeGetCacheDirForWrite

    mock_isdir.return_value = True
    save_cc = [CacheConditions.CACHE_FILE_EXISTS,
               CacheConditions.CHECKSUMS_MATCH]
    self.results_cache.cache_conditions.append(CacheConditions.FALSE)

    # Test 1. CacheCondition.FALSE, which means do not read from the cache.
    # (force re-running of test). Result should be None.
    res = self.results_cache.ReadResult()
    self.assertIsNone(res)
    self.assertEqual(mock_runcmd.call_count, 1)

    # Test 2. Remove CacheCondition.FALSE. Result should still be None,
    # because _GetCacheDirForRead is returning None at the moment.
    mock_runcmd.reset_mock()
    self.results_cache.cache_conditions = save_cc
    res = self.results_cache.ReadResult()
    self.assertIsNone(res)
    self.assertEqual(mock_runcmd.call_count, 0)

    # Test 3. Now set up cache dir to be returned by _GetCacheDirForRead.
    # Since cache_dir is found, will call Result.CreateFromCacheHit, which
    # will actually call our mock_create and should return fake_result.
    self.fakeCacheReturnResult = 'fake/cache/dir'
    mock_create.return_value = fake_result
    res = self.results_cache.ReadResult()
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertEqual(res, fake_result)

    # Test 4. os.path.isdir(cache_dir) will now return false, so result
    # should be None again (no cache found).
    mock_isdir.return_value = False
    res = self.results_cache.ReadResult()
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertIsNone(res)

    # Test 5. os.path.isdir returns true, but mock_create now returns None
    # (the call to CreateFromCacheHit returns None), so the overall result
    # is None.
    mock_isdir.return_value = True
    mock_create.return_value = None
    res = self.results_cache.ReadResult()
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertIsNone(res)

    # Test 6. Everything works 'as expected', result should be fake_result.
    mock_create.return_value = fake_result
    res = self.results_cache.ReadResult()
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertEqual(res, fake_result)

    # Test 7. The run failed; result should be None.
    mock_create.return_value = fake_result
    fake_result.retval = 1
    self.results_cache.cache_conditions.append(CacheConditions.RUN_SUCCEEDED)
    res = self.results_cache.ReadResult()
    self.assertEqual(mock_runcmd.call_count, 0)
    self.assertIsNone(res)
| 998 | self.assertIsNone(res) |
| 999 | |
| 1000 | |
# Run the whole suite when this file is executed directly.
if __name__ == '__main__':
  unittest.main()