#!/usr/bin/python

"""
Selects all rows and columns that satisfy the condition specified
and draws the matrix. A separate SQL query is made for every (x, y)
in the matrix.
"""

print "Content-type: text/html\n"

import sys, os, urllib, cgi, cgitb, re, datetime, time

total_wall_time_start = time.time()

import common
from autotest_lib.tko import display, frontend, db, query_lib
from autotest_lib.client.bin import kernel_versions

html_header = """\
<form action="compose_query.cgi" method="get">
<table border="0">
<tr>
<td>Column: </td>
<td>Row: </td>
<td>Condition: </td>
<td align="center">
<a href="http://test.kernel.org/autotest/AutotestTKOCondition">Help</a>
</td>
</tr>
<tr>
<td>
<SELECT NAME="columns">
%s
</SELECT>
</td>
<td>
<SELECT NAME="rows">
%s
</SELECT>
</td>
<td>
<input type="text" name="condition" size="30" value="%s">
<input type="hidden" name="title" value="%s">
</td>
<td align="center"><input type="submit" value="Submit">
</td>
</tr>
</table>
</form>
"""

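# Drill-down map: when a row or column header (or a cell) is clicked, the
# current grouping field for that axis is replaced by the next, more
# detailed field below. For example, a matrix grouped by kernel x
# machine_group drills down to test x hostname, and eventually to
# individual job tags.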
next_field = {
    'machine_group': 'hostname',
    'hostname': 'tag',
    'tag': 'tag',

    'kernel': 'test',
    'test': 'label',
    'label': 'tag',

    'reason': 'tag',
    'user': 'tag',
    'status': 'tag',

    'time': 'tag',
    'time_daily': 'time',
}

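# CGI form parsing helpers: parse_field() only accepts values that name a
# known column in frontend.test_view_field_dict and falls back to the
# default otherwise; parse_condition() passes the raw value through.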
def parse_field(form, form_field, field_default):
    if form_field not in form:
        return field_default
    field_input = form[form_field].value.lower()
    if field_input and field_input in frontend.test_view_field_dict:
        return field_input
    return field_default


def parse_condition(form, form_field, field_default):
    if form_field not in form:
        return field_default
    return form[form_field].value


form = cgi.FieldStorage()

title_field = parse_condition(form, 'title', '')
row = parse_field(form, 'rows', 'kernel')
column = parse_field(form, 'columns', 'machine_group')
condition_field = parse_condition(form, 'condition', '')

## The caller can specify rows and columns that shall be included in the
## report regardless of whether actual test data is available yet.
force_row_field = parse_condition(form, 'force_row', '')
force_column_field = parse_condition(form, 'force_column', '')


def split_forced_fields(force_field):
    if force_field:
        return force_field.split()
    else:
        return []

force_row = split_forced_fields(force_row_field)
force_column = split_forced_fields(force_column_field)

cgitb.enable()
db_obj = db.db()

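# construct_link() builds the drill-down URL for a header or cell: the
# clicked x/y value is appended to the condition and the corresponding axis
# advances to its next_field entry. For example (hypothetical values),
# clicking the row header y='2.6.24' in a kernel x machine_group view yields
# a test x machine_group view with condition "kernel='2.6.24'" appended.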
def construct_link(x, y):
    next_row = row
    next_column = column
    condition_list = []
    if condition_field != '':
        condition_list.append(condition_field)
    if y:
        next_row = next_field[row]
        condition_list.append("%s='%s'" % (row, y))
    if x:
        next_column = next_field[column]
        condition_list.append("%s='%s'" % (column, x))
    next_condition = '&'.join(condition_list)
    link = 'compose_query.cgi?' + urllib.urlencode({'columns': next_column,
                'rows': next_row, 'condition': next_condition,
                'title': title_field})
    return link

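# construct_logs_link() points a cell at the raw job logs under
# frontend.html_root; the test name is only passed along when the axis is
# 'test' and the value is a real test rather than a boot/build/install step.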
def construct_logs_link(x, y, job_tag):
    job_path = frontend.html_root + job_tag + '/'
    test = ''
    if (row == 'test' and
            y.split('.')[0] not in ('boot', 'build', 'install')):
        test = y
    if (column == 'test' and
            x.split('.')[0] not in ('boot', 'build', 'install')):
        test = x
    return 'retrieve_logs.cgi?' + urllib.urlencode({'job': job_path,
                                                    'test': test})

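# create_select_options() renders the <OPTION> list for the row/column
# drop-downs, marking the currently selected field.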
def create_select_options(selected_val):
    ret = ""
    for option in sorted(frontend.test_view_field_dict.keys()):
        if selected_val == option:
            selected = " SELECTED"
        else:
            selected = ""

        ret += '<OPTION VALUE="%s"%s>%s</OPTION>\n' % \
               (option, selected, option)
    return ret

def map_kernel_base(kernel_name):
    ## insert <br> after each / in the kernel name,
    ## but spare consecutive //
    kernel_name = kernel_name.replace('/', '/<br>')
    kernel_name = kernel_name.replace('/<br>/<br>', '//')
    return kernel_name

def header_tuneup(field_name, header):
    ## Header tune-up depends on the particular field name and may include:
    ## - breaking the header into several strings if it is a long URL
    ## - creating a date from a datetime stamp
    ## - possibly more refinements for other fields later
    if field_name == 'kernel':
        return map_kernel_base(header)
    else:
        return header

# Kernel name mappings -- the kernels table 'printable' field is
# effectively a sortable identifier for the kernel. It encodes the base
# release, which is used for overall sorting, plus, where patches are
# applied, an increasing pNNN patch combination identifier
# (actually the kernel_idx for the entry). This allows sorting
# as normal by the base kernel version and then sub-sorting by the
# "first time we saw" a patch combination, which should keep them in
# approximately date order. This patch identifier is not suitable
# for display, so we have to map it to a suitable html fragment for
# display. This contains the kernel base version plus the truncated
# names of all the patches:
#
#   2.6.24-mm1 p112
#     +add-new-string-functions-
#     +x86-amd-thermal-interrupt
#
# This mapping is produced when the first mapping is requested, with
# a single query over the patches table; the result is then cached.
#
# Note that we only count a base version as patched if it contains
# patches which are not already "expressed" in the base version.
# This includes both -gitN and -mmN kernels.
map_kernel_map = None

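# map_kernel_init() joins the kernels and patches tables once and builds the
# 'base pNNN' -> html-fragment cache described above; -gitN/-mmN patch
# tarballs are skipped because they are already expressed in the base name.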
def map_kernel_init():
    fields = ['base', 'k.kernel_idx', 'name', 'url']
    map = {}
    for (base, idx, name, url) in db_obj.select(','.join(fields),
            'kernels k,patches p', 'k.kernel_idx=p.kernel_idx'):
        match = re.match(r'.*(-mm[0-9]+|-git[0-9]+)\.(bz2|gz)$', url)
        if match:
            continue

        key = base + ' p%d' % (idx)
        if key not in map:
            map[key] = map_kernel_base(base) + ' p%d' % (idx)
        map[key] += '<br>+<span title="' + name + '">' + name[0:25] + '</span>'

    return map


def map_kernel(name):
    global map_kernel_map
    if map_kernel_map is None:
        map_kernel_map = map_kernel_init()

    if name in map_kernel_map:
        return map_kernel_map[name]

    return map_kernel_base(name.split(' ')[0])


field_map = {
    'kernel': map_kernel
}

sql_wall_time = 0

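# gen_matrix() returns the full table as a list of rows of display.box
# cells: a header row with a "(Flip Axis)" link followed by one row per
# y value, each cell carrying a drill-down or logs link.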
def gen_matrix():
    where = None
    if condition_field.strip() != '':
        try:
            where = query_lib.parse_scrub_and_gen_condition(
                condition_field, frontend.test_view_field_dict)
            print "<!-- where clause: %s -->" % (where,)
        except:
            msg = "Unspecified error when parsing condition"
            return [[display.box(msg)]]

    wall_time_start = time.time()
    try:
        ## Unfortunately, we cannot always request the reasons for failure
        ## because doing so may inflate the size of the data transfer
        ## (at the moment we fetch 500 bytes of reason descriptions into
        ## each cell).
        ## If 'status' is in [row, column] then either the width or the
        ## height of the table is <= 7, hence the table is not really 2D
        ## and query_reasons is relatively safe.
        ## At the same time, a view where either rows or columns are grouped
        ## by status is exactly when users need the reasons for failures
        ## the most.

        ## TODO: implement a [Show/Hide reasons] button or link in
        ## all views and do thorough performance testing
        test_data = frontend.get_matrix_data(db_obj, column, row, where,
                        query_reasons=('status' in [row, column]))
        global sql_wall_time
        sql_wall_time = time.time() - wall_time_start

    except db.MySQLTooManyRows, error:
        return [[display.box(str(error))]]

    for f_row in force_row:
        if f_row not in test_data.y_values:
            test_data.y_values.append(f_row)
    for f_column in force_column:
        if f_column not in test_data.x_values:
            test_data.x_values.append(f_column)

    if not test_data.y_values:
        msg = "There are no results for this query (yet?)."
        return [[display.box(msg)]]

    dict_url = {'columns': row,
                'rows': column, 'condition': condition_field,
                'title': title_field}
    link = 'compose_query.cgi?' + urllib.urlencode(dict_url)
    header_row = [display.box("<center>(Flip Axis)</center>", link=link)]

    for x in test_data.x_values:
        dx = x
        if column in field_map:
            dx = field_map[column](x)
        x_header = header_tuneup(column, dx)
        link = construct_link(x, None)
        header_row.append(display.box(x_header, header=True, link=link))

    matrix = [header_row]
    for y in test_data.y_values:
        dy = y
        if row in field_map:
            dy = field_map[row](y)
        y_header = header_tuneup(row, dy)
        link = construct_link(None, y)
        cur_row = [display.box(y_header, header=True, link=link)]
        for x in test_data.x_values:
            ## next 2 lines: temporary, until non-timestamped
            ## records are in the database
            if x == datetime.datetime(1970, 1, 1): x = None
            if y == datetime.datetime(1970, 1, 1): y = None
            try:
                box_data = test_data.data[x][y]
            except:
                cur_row.append(display.box(None, None))
                continue
            job_tag = test_data.data[x][y].job_tag
            if job_tag:
                link = construct_logs_link(x, y, job_tag)
            else:
                link = construct_link(x, y)

            cur_row.append(display.status_precounted_box(db_obj,
                                                         box_data,
                                                         link))
        matrix.append(cur_row)
    return matrix

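# main() emits the complete HTML page: the query form, an optional title,
# the color key, the result matrix and the wall-time footer.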
def main():
    # create the actual page
    print '<html><head><title>'
    print 'Filtered Autotest Results'
    print '</title></head><body>'
    display.print_main_header()
    print html_header % (create_select_options(column),
                         create_select_options(row),
                         condition_field, title_field)
    if title_field:
        print '<h1> %s </h1>' % (title_field)
    print display.color_keys_row()
    display.print_table(gen_matrix())
    print display.color_keys_row()
    total_wall_time = time.time() - total_wall_time_start
    print '<p style="font-size:x-small;">sql access wall time = %s secs, \
total wall time = %s secs</p>' % (sql_wall_time, total_wall_time)
    print '</body></html>'


main()