#!/usr/bin/python

#
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
A parser for metadata_properties.xml that can also render the resulting model
over a Mako template.

Usage:
  metadata_parser_xml.py <filename.xml>
  - outputs the resulting template to stdout

Module:
  The parser is also available as a module import (MetadataParserXml) to use
  in other modules.

Dependencies:
  BeautifulSoup - an HTML/XML parser, available for download from
    http://www.crummy.com/software/BeautifulSoup/
  Mako - a template engine for Python, available for download from
    http://www.makotemplates.org/
"""
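# Example (illustrative sketch; assumes metadata_properties.xml and a Mako
# template such as metadata_template.mako exist in the working directory):
#
#   from metadata_parser_xml import MetadataParserXml
#   parser = MetadataParserXml("metadata_properties.xml")
#   parser.render("metadata_template.mako")  # rendered output goes to stdout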

import sys

from bs4 import BeautifulSoup
from bs4 import NavigableString

from mako.template import Template

from metadata_model import *
from metadata_validate import *

class MetadataParserXml:
  """
  A class to parse any XML file that passes validation with metadata-validate.
  It builds a metadata_model.Metadata graph and then renders it over a
  Mako template.

  Attributes (Read-Only):
    soup: an instance of BeautifulSoup corresponding to the XML contents
    metadata: a constructed instance of metadata_model.Metadata
  """
  def __init__(self, file_name):
    """
    Construct a new MetadataParserXml and immediately parse the given file
    into a metadata model.

    Args:
      file_name: path to an XML file that passes metadata-validate

    Raises:
      ValueError: if the XML file fails to pass metadata_validate.py
    """
    self._soup = validate_xml(file_name)

    if self._soup is None:
      raise ValueError("%s is not a valid XML file" % (file_name))

    self._metadata = Metadata()
    self._parse()
    self._metadata.construct_graph()

  @property
  def soup(self):
    return self._soup

  @property
  def metadata(self):
    return self._metadata

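  # Text-extraction helpers: _find_direct_strings returns the text nodes that
  # are direct children of an element (ignoring nested tags), and
  # _strings_no_nl strips and concatenates them into a single string.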
  @staticmethod
  def _find_direct_strings(element):
    if element.string is not None:
      return [element.string]

    return [i for i in element.contents if isinstance(i, NavigableString)]

  @staticmethod
  def _strings_no_nl(element):
    return "".join([i.strip() for i in MetadataParserXml._find_direct_strings(element)])

  def _parse(self):

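    # First, register every <tag> definition found under the top-level <tags>
    # element.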
    tags = self.soup.tags
    if tags is not None:
      for tag in tags.find_all('tag'):
        self.metadata.insert_tag(tag['id'], tag.string)

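    # Flatten each <entry> into a single dictionary: the required attributes
    # below, plus the type-specific fields from _parse_entry() and the
    # optional fields from _parse_entry_optional().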
    for entry in self.soup.find_all("entry"):
      d = {
            'name': fully_qualified_name(entry),
            'type': entry['type'],
            'kind': find_kind(entry),
            'type_notes': entry.attrs.get('type_notes')
          }

      d2 = self._parse_entry(entry)
      d3 = self._parse_entry_optional(entry)

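      # Merge the three dicts into one entry description; in Python 2,
      # dict.items() returns lists, so they can be concatenated directly.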
      entry_dict = dict(d.items() + d2.items() + d3.items())
      self.metadata.insert_entry(entry_dict)

    entry = None

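    # <clone> elements reuse the type (and type_notes) of the entry they
    # reference, so only the name, kinds, and optional overrides are parsed.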
    for clone in self.soup.find_all("clone"):
      d = {
            'name': clone['entry'],
            'kind': find_kind(clone),
            'target_kind': clone['kind'],
            # no type since it's the same
            # no type_notes since it's the same
          }

      d2 = self._parse_entry_optional(clone)
      clone_dict = dict(d.items() + d2.items())
      self.metadata.insert_clone(clone_dict)

    self.metadata.construct_graph()

  def _parse_entry(self, entry):
    d = {}

    #
    # Enum
    #
    if entry['type'] == 'enum':

      enum_values = []
      enum_optionals = []
      enum_notes = {}
      enum_ids = {}
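      # Collect each <value>: its name, whether it is marked optional, any
      # <notes> text, and an explicit numeric id when one is specified.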
      for value in entry.enum.find_all('value'):

        value_body = self._strings_no_nl(value)
        enum_values.append(value_body)

        if value.attrs.get('optional', 'false') == 'true':
          enum_optionals.append(value_body)

        notes = value.find('notes')
        if notes is not None:
          enum_notes[value_body] = notes.string

        if value.attrs.get('id') is not None:
          enum_ids[value_body] = value['id']

      d['enum_values'] = enum_values
      d['enum_optionals'] = enum_optionals
      d['enum_notes'] = enum_notes
      d['enum_ids'] = enum_ids

    #
    # Container (Array/Tuple)
    #
    if entry.attrs.get('container') is not None:
      container_name = entry['container']

      array = entry.find('array')
      if array is not None:
        array_sizes = []
        for size in array.find_all('size'):
          array_sizes.append(size.string)
        d['container_sizes'] = array_sizes

      tupl = entry.find('tuple')
      if tupl is not None:
        tupl_values = []
        for val in tupl.find_all('value'):
          tupl_values.append(val.name)
        d['tuple_values'] = tupl_values
        d['container_sizes'] = len(tupl_values)
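        # Note: for tuples, container_sizes holds the number of values, whereas
        # for arrays (above) it is the list of <size> strings.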

      d['container'] = container_name

    return d

  def _parse_entry_optional(self, entry):
    d = {}

    optional_elements = ['description', 'range', 'units', 'notes']
    for i in optional_elements:
      prop = find_child_tag(entry, i)

      if prop is not None:
        d[i] = prop.string

    tag_ids = []
    for tag in entry.find_all('tag'):
      tag_ids.append(tag['id'])

    d['tag_ids'] = tag_ids

    return d

  def render(self, template, output_name=None):
    """
    Render the metadata model using a Mako template as the view.

    Args:
      template: path to a Mako template file
      output_name: path to the output file, or None to use stdout
    """
    tpl = Template(filename=template)
    tpl_data = tpl.render(metadata=self.metadata)

    if output_name is None:
      print tpl_data
    else:
      with open(output_name, "w") as output_file:
        output_file.write(tpl_data)

#####################
#####################

if __name__ == "__main__":
  if len(sys.argv) <= 1:
    print >> sys.stderr, "Usage: %s <filename.xml>" % (sys.argv[0])
    sys.exit(0)

  file_name = sys.argv[1]
  parser = MetadataParserXml(file_name)
  parser.render("metadata_template.mako")
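  # To render into a file instead of stdout, pass an output path (illustrative):
  #   parser.render("metadata_template.mako", "<output file>")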

  sys.exit(0)