mbligh | e0b2178 | 2007-10-26 19:39:53 +0000 | [diff] [blame] | 1 | #!/usr/bin/python |
mbligh | cb54126 | 2007-11-15 20:20:10 +0000 | [diff] [blame] | 2 | import sys, re, os, itertools |
mbligh | e0b2178 | 2007-10-26 19:39:53 +0000 | [diff] [blame] | 3 | |
| 4 | |
class Machine:
    """
    State machine tracking one machine's progress through a status log.

    Possible states are:
            TESTING     currently running a test
            REBOOTING   currently rebooting
            BROKEN      busted somehow (e.g. reboot timed out)
            OTHER       none of the above

    Feed log lines in order to process_line(). From an external point
    of view the only relevant attributes are:
            details     text description of the current status
            test_count  number of tests run
    """
    def __init__(self):
        # start in the catch-all state with nothing run yet
        self.state = "OTHER"
        self.details = "Running"
        self.test_name = ""
        self.test_count = 0


    def process_line(self, line):
        """Dispatch one status-log line to the current state's handler."""
        self.handlers[self.state](self, line)


    def _OTHER_handler(self, line):
        # a START line (other than the server-side "----" group) marks
        # the beginning of a test group
        m = self.job_start.match(line)
        if m and m.group(2) != "----":
            self.state = "TESTING"
            self.tab_level = len(m.group(1))    # nesting depth of the group
            self.test_name = m.group(2)
            self.test_status = "GOOD"           # optimistic until told otherwise
            self.details = "Running %s" % self.test_name
            return

        m = self.reboot_start.match(line)
        if not m:
            return
        self.boot_status = m.group(1)
        if self.worse_status("GOOD", self.boot_status) != "GOOD":
            # the reboot attempt itself already failed
            self.state = "BROKEN"
            self.details = "Reboot failed - machine broken"
        else:
            self.state = "REBOOTING"
            self.details = "Rebooting"


    def _TESTING_handler(self, line):
        m = self.job_status.match(line)
        if m:
            # only status lines that are direct children of our group
            # (depth tab_level + 1) and about our test are interesting
            if (len(m.group(1)) == self.tab_level + 1 and
                    m.group(3) == self.test_name):
                self.test_status = self.worse_status(self.test_status,
                                                     m.group(2))
                self.details = "Running %s: %s" % (self.test_name,
                                                   m.group(4))
            return

        m = self.job_end.match(line)
        if not m:
            return
        if len(m.group(1)) != self.tab_level:
            return    # END of some subgroup, not of our group
        if self.test_name != m.group(3):
            raise ValueError('Group START and END name mismatch')
        self.state = "OTHER"
        self.test_status = self.worse_status(self.test_status, m.group(2))
        self.test_name = ""
        del self.test_status
        self.details = "Running"
        self.test_count += 1


    def _REBOOTING_handler(self, line):
        m = self.reboot_done.match(line)
        if not m:
            return
        outcome = self.worse_status(self.boot_status, m.group(1))
        del self.boot_status
        if outcome == "GOOD":
            self.state = "OTHER"
            self.details = "Running"
        else:
            self.state = "BROKEN"
            self.details = "Reboot failed - machine broken"


    def _BROKEN_handler(self, line):
        # broken machines stay broken - ignore everything
        pass


    # state name -> handler; values are plain functions, invoked with
    # self passed explicitly by process_line()
    handlers = {"OTHER": _OTHER_handler,
                "TESTING": _TESTING_handler,
                "REBOOTING": _REBOOTING_handler,
                "BROKEN": _BROKEN_handler}


    # statuses ordered from best to worst; None ranks below everything
    status_list = ["GOOD", "WARN", "FAIL", "ABORT", "ERROR"]
    order_dict = dict((status, i) for i, status in enumerate(status_list))
    order_dict[None] = -1


    _severity = "|".join(status_list)
    job_start = re.compile(r"^(\t*)START\t----\t([^\t]+).*$")
    job_status = re.compile(r"^(\t*)(%s)\t([^\t]+)\t(?:[^\t]+).*\t([^\t]+)$"
                            % _severity)
    job_end = re.compile(r"^(\t*)END (%s)\t----\t([^\t]+).*$" % _severity)
    reboot_start = re.compile(r"^\t?(%s)\t[^\t]+\treboot\.start.*$"
                              % _severity)
    reboot_done = re.compile(r"^\t?(%s)\t[^\t]+\treboot\.verify.*$"
                             % _severity)

    @classmethod
    def worse_status(cls, old_status, new_status):
        """Return whichever of the two statuses is more severe."""
        # max() keeps old_status on ties, matching the original
        # "replace only when strictly worse" behavior
        return max(old_status, new_status,
                   key=lambda status: cls.order_dict[status])
mbligh | 31a49de | 2007-11-05 18:41:19 +0000 | [diff] [blame] | 127 | |
mbligh | e0b2178 | 2007-10-26 19:39:53 +0000 | [diff] [blame] | 128 | |
def parse_status(status_log):
    """\
    Parse the status from a single status log.
    Do not use with status logs from multi-machine tests.

    Args:
            status_log: path to a single-machine status.log file.

    Returns:
            dict with keys "status" (text description of the current
            state), "test_on" (name of the running test, or "") and
            "test_num_complete" (number of finished tests).
    """
    parser = Machine()
    # open() instead of the Python-2-only file() builtin; the 'with'
    # block guarantees the log is closed even if parsing raises
    with open(status_log) as log:
        for line in log:
            parser.process_line(line)
    result = {
        "status": parser.details,
        "test_on": parser.test_name,
        "test_num_complete": parser.test_count
    }
    return result
mbligh | e0b2178 | 2007-10-26 19:39:53 +0000 | [diff] [blame] | 143 | |
| 144 | |
mbligh | cb54126 | 2007-11-15 20:20:10 +0000 | [diff] [blame] | 145 | def _file_iterator(filename): |
| 146 | """\ |
| 147 | Return an iterator over file(filename), or an empty iterator |
| 148 | if the file does not exist. |
| 149 | """ |
| 150 | if os.path.exists(filename): |
| 151 | return iter(file(filename)) |
| 152 | else: |
| 153 | return () |
| 154 | |
| 155 | |
def parse_machine_status(root_path, name):
    """Parse the status for one machine (of a multi-machine test).

    Merges the shared root status.log with the machine-specific one,
    ordering lines by their timestamp= fields, and runs the merged
    stream through the status parser.

    Args:
            root_path: results directory holding the root status.log.
            name: machine name; its log is <root_path>/<name>/status.log.

    Returns:
            dict with keys "status", "test_on" and "test_num_complete"
            (same shape as parse_status).
    """
    general_log = _file_iterator(os.path.join(root_path, "status.log"))
    machine_log = _file_iterator(os.path.join(root_path, name, "status.log"))
    # raw string: \d in a plain literal is an invalid escape sequence
    timestamp_regex = re.compile(r"\ttimestamp=(\d+)")
    # collect all the lines from both the root & machine-specific log
    lines = []
    timestamp = 0
    for line in itertools.chain(general_log, machine_log):
        timestamp_match = timestamp_regex.search(line)
        # if the log line has a timestamp, use it
        # otherwise, just use the timestamp from the previous line
        if timestamp_match:
            timestamp = int(timestamp_match.group(1))
        lines.append((timestamp, line))
    # sort on the timestamp ONLY: sorting the (timestamp, line) tuples
    # directly would order equal-timestamp lines alphabetically instead
    # of preserving their original log order (list.sort is stable)
    lines.sort(key=lambda pair: pair[0])
    # now actually run the lines through the parser
    parser = Machine()
    for timestamp, line in lines:
        parser.process_line(line)
    return {
        "status": parser.details,
        "test_on": parser.test_name,
        "test_num_complete": parser.test_count
    }
| 181 | |
| 182 | |
def parse_multimachine_status(root_path, machine_names):
    """Parse the status for a set of machines.

    Returns a dict mapping each machine name to its parsed status
    dict (as produced by parse_machine_status).
    """
    return dict((name, parse_machine_status(root_path, name))
                for name in machine_names)
| 189 | |
| 190 | |
if __name__ == "__main__":
    args = sys.argv[1:]
    if len(args) != 1:
        # print() with a single argument is valid and behaves
        # identically on both Python 2 and 3; the old bare print
        # statement is a SyntaxError on Python 3
        print("USAGE: status.py status_log")
        sys.exit(1)
    print(parse_status(args[0]))