#!/usr/bin/python -i

import sys
#import xml.etree.ElementTree as etree
import urllib2
from bs4 import BeautifulSoup
import json
import vuid_mapping
import operator
import re

#############################
# spec.py script
#
# Overview - this script is intended to generate validation error codes and message strings from the html version of
# the specification. In addition to generating the header file, it provides a number of corollary services to aid in
# generating/updating the header.
#
# Ideal flow - Not there currently, but the ideal flow for this script would be that you run the script, it pulls the
# latest spec, compares it to the current set of generated error codes, and makes any updates as needed
#
# Current flow - the current flow achieves all of the ideal flow goals, but with more steps than are desired
# 1. Get the spec - right now the spec has to be manually generated or pulled from the web
# 2. Generate header from spec - This is done in a single command line
# 3. Generate database file from spec - Can be done along with step #2 above, the database file contains a list of
#    all error enums and message strings, along with some other info on if those errors are implemented/tested
# 4. Update header using a given database file as the root and a new spec file as goal - This makes sure that existing
#    errors keep the same enum identifier while also making sure that new errors get a unique_id that continues on
#    from the end of the previous highest unique_id.
#
# TODO:
# 1. Improve string matching to add more automation for figuring out which messages are changed vs. completely new
#
#############################
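# Example invocations (illustrative only; option spellings follow the printHelp() text below, and the
# filenames are simply the defaults defined below -- adjust as needed):
#   python spec.py -gendb vk_validation_error_database.txt    # parse the spec, write header + database
#   python spec.py -compare vk_validation_error_database.txt  # use an existing database as the baseline
#   python spec.py -update                                    # master flow: refresh header & database in-place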

spec_filename = "vkspec.html" # can override w/ '-spec <filename>' option
out_filename = "vk_validation_error_messages.h" # can override w/ '-out <filename>' option
db_filename = "vk_validation_error_database.txt" # can override w/ '-gendb <filename>' option
json_filename = None # can pass in w/ '-json <filename>' option
gen_db = False # set to True when '-gendb <filename>' option provided
spec_compare = False # set to True with '-compare <db_filename>' option
json_compare = False # compare existing DB to json file input
migrate_ids = False # set to True with '-migrate' option to move old ids from database to new ids in spec
# When migrating IDs:
# 1. Read in DB
# 2. Create mapping between old VU enum values & new string-based IDs
# 3. Read in source and update all old VU enum values to new string-based IDs
json_url = "https://www.khronos.org/registry/vulkan/specs/1.0-extensions/validation/validusage.json"
read_json = False
# This is the root spec link that is used in error messages to point users to spec sections
#old_spec_url = "https://www.khronos.org/registry/vulkan/specs/1.0/xhtml/vkspec.html"
spec_url = "https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html"
core_url = "https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html"
ext_url = "https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html"
# After the custom validation error message, this is the prefix for the standard message that includes the
# spec valid usage language as well as the link to the nearest section of the spec for that language
error_msg_prefix = "For more information refer to Vulkan Spec Section "
ns = {'ns': 'http://www.w3.org/1999/xhtml'}
validation_error_enum_name = "VALIDATION_ERROR_"
# Dict of new enum ids that should be forced to remap to old ids, explicitly set by -remap option
remap_dict = {}
# Custom map to override known-bad struct-to-API mappings
struct_to_api_map = {
    'VkDeviceQueueCreateInfo' : 'vkCreateDevice',
}

def printHelp():
    print ("Usage: python spec.py [-spec <specfile.html>] [-out <headerfile.h>] [-gendb <databasefile.txt>] [-compare <databasefile.txt>] [-update] [-remap <new_id-old_id,count>] [-json <json_file>] [-help]")
    print ("\n Default script behavior is to parse the specfile and generate a header of unique error enums and corresponding error messages based on the specfile.\n")
    print (" Default specfile is from online at %s" % (spec_url))
    print (" Default headerfile is %s" % (out_filename))
    print (" Default databasefile is %s" % (db_filename))
    print ("\nIf '-gendb' option is specified then a database file is generated to default file or <databasefile.txt> if supplied. The database file stores")
    print (" the list of enums and their error messages.")
    print ("\nIf '-compare' option is specified then the given database file will be read in as the baseline for comparing against the new specfile")
    print ("\nIf '-update' option is specified this triggers the master flow to automate updating header and database files using default db file as baseline")
    print (" and online spec file as the latest. The default header and database files will be updated in-place for review and commit to the git repo.")
    print ("\nIf '-remap' option is specified it supplies forced remapping from new enum ids to old enum ids. This should only be specified along with -update")
    print (" option. Starting at newid and remapping to oldid, count ids will be remapped. Default count is '1' and use ':' to specify multiple remappings.")
    print ("\nIf '-json' option is specified it triggers the script to load in data from a json file.")
    print ("\nIf '-json-file' option is specified it will point to a local json file, else '%s' is used from the web." % (json_url))

def get8digithex(dec_num):
    """Convert a decimal # into an 8-digit hex"""
    if dec_num > 4294967295:
        print ("ERROR: Decimal # %d can't be represented in 8 hex digits" % (dec_num))
        sys.exit()
    hex_num = hex(dec_num)
    return hex_num[2:].zfill(8)
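# For reference, a couple of worked examples of the helper above (not executed by the script):
#   get8digithex(10)         -> '0000000a'
#   get8digithex(4294967295) -> 'ffffffff'
# The resulting 8-digit hex string is what gets appended to VALIDATION_ERROR_ when string-based VUIDs
# are converted to numeric enum values (see genHeader() and migrateIDs() below).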

class Specification:
    def __init__(self):
        self.tree = None
        self.val_error_dict = {} # string for enum is key that references 'error_msg' and 'api'
        self.error_db_dict = {} # dict of previous error values read in from database file
        self.delimiter = '~^~' # delimiter for db file
        self.implicit_count = 0
        # Global dicts used for tracking spec updates from old to new VUs
        self.orig_full_msg_dict = {} # Original full error msg to ID mapping
        self.orig_no_link_msg_dict = {} # Pair of API,Original msg w/o spec link to ID list mapping
        self.orig_core_msg_dict = {} # Pair of API,Original core msg (no link or section) to ID list mapping
        self.last_mapped_id = -10 # start as negative so we don't hit an accidental sequence
        self.orig_test_imp_enums = set() # Track old enums w/ tests and/or implementation to flag any that aren't carried fwd
        # Dict of data from json DB
        # Key is the spec's string VUID, which leads to a dict w/ the following values:
        #  'ext' -> <core|<ext_name>>
        #  'number_vuid' -> <numerical_vuid>
        #  'struct_func' -> <struct or function name from the json file>
        #  'vu_txt' -> <plain-text VU language>
        self.json_db = {}
        self.json_missing = 0
        self.struct_to_func_map = {} # Map structs to the API func that they fall under in the spec
        self.duplicate_json_key_count = 0
        self.copyright = """/* THIS FILE IS GENERATED. DO NOT EDIT. */

/*
 * Vulkan
 *
 * Copyright (c) 2016 Google Inc.
 * Copyright (c) 2016 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 */"""
    def _checkInternetSpec(self):
        """Verify that we can access the spec online"""
        try:
            online = urllib2.urlopen(spec_url,timeout=1)
            return True
        except urllib2.URLError as err:
            return False
        return False
    def soupLoadFile(self, online=True, spec_file=spec_filename):
        """Load a spec file into BeautifulSoup"""
        if (online and self._checkInternetSpec()):
            print ("Making soup from spec online at %s, this will take a minute" % (spec_url))
            self.soup = BeautifulSoup(urllib2.urlopen(spec_url), 'html.parser')
        else:
            print ("Making soup from local spec %s, this will take a minute" % (spec_file))
            with open(spec_file, "r") as sf:
                self.soup = BeautifulSoup(sf, 'html.parser')
        self.parseSoup()
        #print(self.soup.prettify())
    def updateDict(self, updated_dict):
        """Assign internal dict to use updated_dict"""
        self.val_error_dict = updated_dict

    def readJSON(self):
        """Read in JSON file"""
        if json_filename is not None:
            with open(json_filename) as jsf:
                self.json_data = json.load(jsf)
        else:
            self.json_data = json.load(urllib2.urlopen(json_url))

    def parseJSON(self):
        """Parse JSON VUIDs into data struct"""
        # Format of JSON file is:
        # "API": { "core|EXT": [ {"vuid": "<id>", "text": "<VU txt>"}]},
        # "VK_KHX_external_memory" & "VK_KHX_device_group" - extension case (vs. "core")
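        # For example, a single entry in that layout would look roughly like the following (illustrative
        # placeholders only, not copied from the real validusage.json):
        #   "vkQueueSubmit": { "core": [ {"vuid": "<string VUID>", "text": "<VU text, may contain html markup>"} ] }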
        for api in sorted(self.json_data):
            for ext in sorted(self.json_data[api]):
                for vu_txt_dict in self.json_data[api][ext]:
                    vuid = vu_txt_dict['vuid']
                    vutxt = vu_txt_dict['text']
                    #print ("%s:%s:%s:%s" % (api, ext, vuid, vutxt))
                    #print ("VUTXT orig:%s" % (vutxt))
                    just_txt = BeautifulSoup(vutxt, 'html.parser')
                    #print ("VUTXT only:%s" % (just_txt.get_text()))
                    num_vuid = vuid_mapping.convertVUID(vuid)
                    function = api
                    if function in self.struct_to_func_map:
                        function = self.struct_to_func_map[function]
                    key = "%s,'%s'" % (function, just_txt.get_text())
                    if key in self.json_db:
                        print ("Key '%s' is already in json_db!" % (key))
                        self.duplicate_json_key_count = self.duplicate_json_key_count + 1
                        #sys.exit()
                    self.json_db[vuid] = {}
                    self.json_db[vuid]['ext'] = ext
                    #self.json_db[key]['string_vuid'] = vuid
                    self.json_db[vuid]['number_vuid'] = num_vuid
                    self.json_db[vuid]['struct_func'] = api
                    just_txt = just_txt.get_text().strip()
                    unicode_map = {
                        u"\u2019" : "'",
                        u"\u2192" : "->",
                    }
                    for um in unicode_map:
                        just_txt = just_txt.replace(um, unicode_map[um])
                    self.json_db[vuid]['vu_txt'] = just_txt
                    print ("Spec vu txt:%s" % (self.json_db[vuid]['vu_txt']))
                    # if 'must specify aspects present in the calling command' in key:
                    #     print "Found KEY:%s" % (key)
                    #sys.exit()
        #sys.exit()

    def compareJSON(self):
        """Compare parsed json file with existing data read in from DB file"""
        json_db_set = set()
        for vuid in self.json_db: # pull entries out and see which fields we're missing from error_db
            json_db_set.add(vuid)
        for enum in self.error_db_dict:
            vuid_string = self.error_db_dict[enum]['vuid_string']
            if vuid_string not in self.json_db:
                #print ("Full string for %s is:%s" % (enum, full_error_string))
                print ("WARN: Couldn't find vuid_string in json db:%s" % (vuid_string))
                self.json_missing = self.json_missing + 1
                self.error_db_dict[enum]['ext'] = 'core'
                #sys.exit()
            else:
                json_db_set.remove(vuid_string)
                # TMP: Add ext details to error db
                self.error_db_dict[enum]['ext'] = self.json_db[vuid_string]['ext']
                if 'core' == self.json_db[vuid_string]['ext'] or '!' in self.json_db[vuid_string]['ext']:
                    spec_link = "%s#%s" % (core_url, vuid_string)
                else:
                    spec_link = "%s#%s" % (ext_url, vuid_string)
                self.error_db_dict[enum]['error_msg'] = "The spec valid usage text states '%s' (%s)" % (self.json_db[vuid_string]['vu_txt'], spec_link)
                print ("Updated error_db error_msg:%s" % (self.error_db_dict[enum]['error_msg']))
        #sys.exit()
        print ("These json DB entries are not in error DB:")
        for extra_vuid in json_db_set:
            print ("\t%s" % (extra_vuid))
            # Add these missing entries into the error_db
            # Create link into core or ext spec as needed
            if 'core' == self.json_db[extra_vuid]['ext'] or '!' in self.json_db[extra_vuid]['ext']:
                spec_link = "%s#%s" % (core_url, extra_vuid)
            else:
                spec_link = "%s#%s" % (ext_url, extra_vuid)
            error_enum = "VALIDATION_ERROR_%d" % (self.json_db[extra_vuid]['number_vuid'])
            self.error_db_dict[error_enum] = {}
            self.error_db_dict[error_enum]['check_implemented'] = 'N'
            self.error_db_dict[error_enum]['testname'] = 'None'
            self.error_db_dict[error_enum]['api'] = self.json_db[extra_vuid]['struct_func']
            self.error_db_dict[error_enum]['vuid_string'] = extra_vuid
            self.error_db_dict[error_enum]['error_msg'] = "The spec valid usage text states '%s' (%s)" % (self.json_db[extra_vuid]['vu_txt'], spec_link)
            self.error_db_dict[error_enum]['note'] = ''
            self.error_db_dict[error_enum]['ext'] = self.json_db[extra_vuid]['ext']
        # Enable this code if you want to reset val_error_dict & assign to db dict
        # self.val_error_dict = {}
        # for enum in self.error_db_dict:
        #     self.val_error_dict[enum] = {}
        #     self.val_error_dict[enum]['check_implemented'] = self.error_db_dict[enum]['check_implemented']
        #     self.val_error_dict[enum]['testname'] = self.error_db_dict[enum]['testname']
        #     self.val_error_dict[enum]['api'] = self.error_db_dict[enum]['api']
        #     self.val_error_dict[enum]['vuid_string'] = self.error_db_dict[enum]['vuid_string']
        #     self.val_error_dict[enum]['ext'] = self.error_db_dict[enum]['ext']
        #     self.val_error_dict[enum]['error_msg'] = self.error_db_dict[enum]['error_msg']
        #     self.val_error_dict[enum]['note'] = self.error_db_dict[enum]['note']
        #     implicit = False
        #     if extra_vuid.split("_")[-1].isdigit():
        #         implicit = True
        #     self.val_error_dict[enum]['implicit'] = implicit

    def parseSoup(self):
        """Parse the registry Element, once created"""
        print ("Parsing spec file...")
        unique_enum_id = 0
        #self.root = self.tree.getroot()
        #print ("ROOT: %s") % self.root
        prev_heading = '' # Last seen section heading or sub-heading
        prev_link = '' # Last seen link id within the spec
        api_function = '' # API call that a check appears under
        error_strings = set() # Flag any exact duplicate error strings and skip them
        for tag in self.soup.find_all(True): #self.root.iter(): # iterate down tree
            # Grab most recent section heading and link
            #print ("tag.name is %s and class is %s" % (tag.name, tag.get('class')))
            if tag.name in ['h2', 'h3', 'h4']:
                #if tag.get('class') != 'title':
                #    continue
                #print ("Found heading %s w/ string %s" % (tag.name, tag.string))
                if None == tag.string:
                    prev_heading = ""
                else:
                    prev_heading = "".join(tag.string)
                # Insert a space between heading number & title
                sh_list = prev_heading.rsplit('.', 1)
                prev_heading = '. '.join(sh_list)
                prev_link = tag['id']
                #print ("Set prev_heading %s to have link of %s" % (prev_heading.encode("ascii", "ignore"), prev_link.encode("ascii", "ignore")))
            elif tag.name == 'a': # grab any intermediate links
                if tag.get('id') != None:
                    prev_link = tag.get('id')
                    #print ("Updated prev link to %s" % (prev_link))
            elif tag.name == 'div' and tag.get('class') is not None and tag['class'][0] == 'listingblock':
                # Check and see if this is API function
                code_text = "".join(tag.strings).replace('\n', '')
                code_text_list = code_text.split()
                if len(code_text_list) > 1 and code_text_list[1].startswith('vk'):
                    api_function = code_text_list[1].strip('(')
                    #print ("Found API function: %s" % (api_function))
                    prev_link = api_function
                    #print ("Updated prev link to %s" % (prev_link))
                elif tag.get('id') != None:
                    prev_link = tag.get('id')
                    #print ("Updated prev link to %s" % (prev_link))
                    if tag.get('id').startswith('Vk') and api_function != '':
                        #if len(code_text_list) > 1 and code_text_list[1].startswith('Vk'):
                        self.struct_to_func_map[tag.get('id')] = api_function
                        print ("Added mapping from struct:%s to API:%s" % (tag.get('id'), api_function))

            #elif tag.name == '{http://www.w3.org/1999/xhtml}div' and tag.get('class') == 'sidebar':
            elif tag.name == 'div' and tag.get('class') is not None and tag['class'][0] == 'content':
                #print("Parsing down a div content tag")
                # parse down sidebar to check for valid usage cases
                valid_usage = False
                implicit = False
                for elem in tag.find_all(True):
                    #print("  elem is %s w/ string %s" % (elem.name, elem.string))
                    if elem.name == 'div' and None != elem.string and 'Valid Usage' in elem.string:
                        valid_usage = True
                        if '(Implicit)' in elem.string:
                            implicit = True
                        else:
                            implicit = False
                    elif valid_usage and elem.name == 'li': # grab actual valid usage requirements
                        # Grab link
                        prev_link = elem.a.get('id')
                        if 'VUID' not in prev_link:
                            print ("Found VU link that doesn't have 'VUID':%s" % (prev_link))
                            sys.exit()
                        #print("I think this is a VU w/ elem.strings is %s" % (elem.strings))
                        error_msg_str = "%s '%s' which states '%s' (%s#%s)" % (error_msg_prefix, prev_heading, "".join(elem.strings).replace('\n', ' ').strip(), spec_url, prev_link)
                        # Some txt has multiple spaces so split on whitespace and join w/ single space
                        error_msg_str = " ".join(error_msg_str.split())
                        if error_msg_str in error_strings:
                            print ("WARNING: SKIPPING adding repeat entry for string. Please review spec and file issue as appropriate. Repeat string is: %s" % (error_msg_str))
                        else:
                            error_strings.add(error_msg_str)
                            enum_str = "%s%05d" % (validation_error_enum_name, unique_enum_id)
                            # TODO : '\' chars in spec error messages are most likely bad spec txt that needs to be updated
                            self.val_error_dict[enum_str] = {}
                            self.val_error_dict[enum_str]['error_msg'] = error_msg_str.encode("ascii", "ignore").replace("\\", "/")
                            self.val_error_dict[enum_str]['api'] = api_function.encode("ascii", "ignore")
                            self.val_error_dict[enum_str]['implicit'] = False
                            self.val_error_dict[enum_str]['vuid_string'] = prev_link
                            if implicit:
                                self.val_error_dict[enum_str]['implicit'] = True
                                self.implicit_count = self.implicit_count + 1
                            unique_enum_id = unique_enum_id + 1
        #print ("Validation Error Dict has a total of %d unique errors and contents are:\n%s" % (unique_enum_id, self.val_error_dict))
        # TEMP : override struct to api mapping with manual data
        for struct in struct_to_api_map:
            self.struct_to_func_map[struct] = struct_to_api_map[struct]
        print ("Validation Error Dict has a total of %d unique errors" % (unique_enum_id))
    def genHeader(self, header_file):
        """Generate a header file based on the contents of a parsed spec"""
        print ("Generating header %s..." % (header_file))
        file_contents = []
        file_contents.append(self.copyright)
        file_contents.append('\n#pragma once')
        file_contents.append('\n// Disable auto-formatting for generated file')
        file_contents.append('// clang-format off')
        file_contents.append('\n#include <unordered_map>')
        file_contents.append('\n// enum values for unique validation error codes')
        file_contents.append('// Corresponding validation error message for each enum is given in the mapping table below')
        file_contents.append('// When a given error occurs, these enum values should be passed as the messageCode')
        file_contents.append('// parameter to the PFN_vkDebugReportCallbackEXT function')
        enum_decl = ['enum UNIQUE_VALIDATION_ERROR_CODE {\n    VALIDATION_ERROR_UNDEFINED = -1,']
        error_string_map = ['static std::unordered_map<int, char const *const> validation_error_map{']
        enum_value = 0
        max_enum_val = 0
        for enum in sorted(self.val_error_dict):
            #print ("Header enum is %s" % (enum))
            #enum_value = int(enum.split('_')[-1])
            # TMP: Use updated value
            vuid_str = self.val_error_dict[enum]['vuid_string']
            if vuid_str in self.json_db:
                enum_value = self.json_db[vuid_str]['number_vuid']
            else:
                enum_value = vuid_mapping.convertVUID(vuid_str)
            new_enum = "%s%s" % (validation_error_enum_name, get8digithex(enum_value))
            enum_decl.append('    %s = 0x%s,' % (new_enum, get8digithex(enum_value)))
            error_string_map.append('    {%s, "%s"},' % (new_enum, self.val_error_dict[enum]['error_msg']))
            max_enum_val = max(max_enum_val, enum_value)
        #enum_decl.append('    %sMAX_ENUM = %d,' % (validation_error_enum_name, enum_value + 1))
        enum_decl.append('    %sMAX_ENUM = %d,' % (validation_error_enum_name, max_enum_val + 1))
        enum_decl.append('};')
        error_string_map.append('};\n')
        file_contents.extend(enum_decl)
        file_contents.append('\n// Mapping from unique validation error enum to the corresponding error message')
        file_contents.append('// The error message should be appended to the end of a custom error message that is passed')
        file_contents.append('// as the pMessage parameter to the PFN_vkDebugReportCallbackEXT function')
        file_contents.extend(error_string_map)
        #print ("File contents: %s" % (file_contents))
        with open(header_file, "w") as outfile:
            outfile.write("\n".join(file_contents))
    def analyze(self):
        """Print out some stats on the valid usage dict"""
        # Create dict for # of occurrences of identical strings
        str_count_dict = {}
        unique_id_count = 0
        for enum in self.val_error_dict:
            err_str = self.val_error_dict[enum]['error_msg']
            if err_str in str_count_dict:
                print ("Found repeat error string")
                str_count_dict[err_str] = str_count_dict[err_str] + 1
            else:
                str_count_dict[err_str] = 1
            unique_id_count = unique_id_count + 1
        print ("Processed %d unique_ids" % (unique_id_count))
        repeat_string = 0
        for es in str_count_dict:
            if str_count_dict[es] > 1:
                repeat_string = repeat_string + 1
                print ("String '%s' repeated %d times" % (es, str_count_dict[es]))
        print ("Found %d repeat strings" % (repeat_string))
        print ("Found %d implicit checks" % (self.implicit_count))
    def genDB(self, db_file):
        """Generate a database of check_enum, check_implemented, testname, api, vuid_string, core|ext, error_string, note"""
        db_lines = []
        # Write header for database file
        db_lines.append("# This is a database file with validation error check information")
        db_lines.append("# Comments are denoted with '#' char")
        db_lines.append("# The format of the lines is:")
        db_lines.append("# <error_enum>%s<check_implemented>%s<testname>%s<api>%s<vuid_string>%s<core|ext>%s<errormsg>%s<note>" % (self.delimiter, self.delimiter, self.delimiter, self.delimiter, self.delimiter, self.delimiter, self.delimiter))
        db_lines.append("# error_enum: Unique error enum for this check of format %s<uniqueid>" % validation_error_enum_name)
        db_lines.append("# check_implemented: 'Y' if check has been implemented in layers, or 'N' for not implemented")
        db_lines.append("# testname: Name of validation test for this check, 'Unknown' for unknown, 'None' if not implemented, or 'NotTestable' if cannot be implemented")
        db_lines.append("# api: Vulkan API function that this check is related to")
        db_lines.append("# vuid_string: Unique string to identify this check")
        db_lines.append("# core|ext: Either 'core' for core spec or some extension string that indicates the extension required for this VU to be relevant")
        db_lines.append("# errormsg: The unique error message for this check that includes spec language and link")
        db_lines.append("# note: Free txt field with any custom notes related to the check in question")
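        # For reference, a generated (non-comment) database line then looks something like this -- a single
        # line with all eight fields separated by the '~^~' delimiter; every field below is a placeholder,
        # not real data:
        # VALIDATION_ERROR_00000~^~N~^~Unknown~^~vkCreateInstance~^~<vuid_string>~^~core~^~<error msg w/ spec language and link>~^~<note>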
        for enum in sorted(self.val_error_dict):
            # Default check/test implementation status to N/Unknown, then update below if appropriate
            implemented = 'N'
            testname = 'Unknown'
            note = ''
            core_ext = 'core'
            implicit = self.val_error_dict[enum]['implicit']
            # If we have an existing db entry for this enum, use its implemented/testname values
            if enum in self.error_db_dict:
                implemented = self.error_db_dict[enum]['check_implemented']
                testname = self.error_db_dict[enum]['testname']
                note = self.error_db_dict[enum]['note']
                core_ext = self.error_db_dict[enum]['ext']
                self.val_error_dict[enum]['vuid_string'] = self.error_db_dict[enum]['vuid_string']
            if implicit and 'implicit' not in note: # add implicit note
                if '' != note:
                    note = "implicit, %s" % (note)
                else:
                    note = "implicit"
            #print ("delimiter: %s, id: %s, str: %s" % (self.delimiter, enum, self.val_error_dict[enum]))
            # No existing entry so default to N for implemented and None for testname
            db_lines.append("%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (enum, self.delimiter, implemented, self.delimiter, testname, self.delimiter, self.val_error_dict[enum]['api'], self.delimiter, self.val_error_dict[enum]['vuid_string'], self.delimiter, core_ext, self.delimiter, self.val_error_dict[enum]['error_msg'], self.delimiter, note))
        db_lines.append("\n") # newline at end of file
        print ("Generating database file %s" % (db_file))
        with open(db_file, "w") as outfile:
            outfile.write("\n".join(db_lines))
    def readDB(self, db_file):
        """Read a db file into a dict, refer to genDB function above for format of each line"""
        db_dict = {} # This is a simple db of just enum->errormsg, the same as is created from spec
        max_id = 0
        with open(db_file, "r") as infile:
            for line in infile:
                line = line.strip()
                if line.startswith('#') or '' == line:
                    continue
                db_line = line.split(self.delimiter)
                if len(db_line) != 8:
                    print ("ERROR: Bad database line doesn't have 8 elements: %s" % (line))
                error_enum = db_line[0]
                implemented = db_line[1]
                testname = db_line[2]
                api = db_line[3]
                vuid_str = db_line[4]
                core_ext = db_line[5]
                error_str = db_line[6]
                note = db_line[7]
                db_dict[error_enum] = error_str
                # Also read complete database contents into our class var for later use
                self.error_db_dict[error_enum] = {}
                self.error_db_dict[error_enum]['check_implemented'] = implemented
                self.error_db_dict[error_enum]['testname'] = testname
                self.error_db_dict[error_enum]['api'] = api
                self.error_db_dict[error_enum]['vuid_string'] = vuid_str
                self.error_db_dict[error_enum]['ext'] = core_ext
                self.error_db_dict[error_enum]['error_msg'] = error_str
                self.error_db_dict[error_enum]['note'] = note
                unique_id = int(db_line[0].split('_')[-1])
                if unique_id > max_id:
                    max_id = unique_id
        return (db_dict, max_id)
    # This is a helper function to do bookkeeping on data structs when comparing original
    # error ids to current error ids
    # It tracks all updated enums in mapped_enums and removes those enums from any lists
    # in the no_link and core dicts
    def _updateMappedEnum(self, mapped_enums, enum):
        mapped_enums.add(enum)
        # When looking for ID to map, we favor sequences so track last ID mapped
        self.last_mapped_id = int(enum.split('_')[-1])
        for msg in self.orig_no_link_msg_dict:
            if enum in self.orig_no_link_msg_dict[msg]:
                self.orig_no_link_msg_dict[msg].remove(enum)
        for msg in self.orig_core_msg_dict:
            if enum in self.orig_core_msg_dict[msg]:
                self.orig_core_msg_dict[msg].remove(enum)
        return mapped_enums
    # Check all ids in given id list to see if one is in sequence from last mapped id
    def findSeqID(self, id_list):
        next_seq_id = self.last_mapped_id + 1
        for map_id in id_list:
            id_num = int(map_id.split('_')[-1])
            if id_num == next_seq_id:
                return True
        return False
    # Use the next ID in sequence. This should only be called if findSeqID() just returned True
    def useSeqID(self, id_list, mapped_enums):
        next_seq_id = self.last_mapped_id + 1
        mapped_id = ''
        for map_id in id_list:
            id_num = int(map_id.split('_')[-1])
            if id_num == next_seq_id:
                mapped_id = map_id
                self._updateMappedEnum(mapped_enums, mapped_id)
                return (mapped_enums, mapped_id)
        return (mapped_enums, mapped_id)
    # Compare unique ids from original database to data generated from updated spec
    # First, make 3 separate mappings of original error messages:
    #  1. Map the full error message to its id. There should only be 1 ID per full message (orig_full_msg_dict)
    #  2. Map the initial portion of the message w/o link to list of IDs. There may be a little aliasing here (orig_no_link_msg_dict)
    #  3. Map the core spec message w/o link or section info to list of IDs. There will be lots of aliasing here (orig_core_msg_dict)
    # Also store a set of all IDs that have been mapped to that will serve 2 purposes:
    #  1. Pull IDs out of the above dicts as they're remapped since we know they won't be used
    #  2. Make sure that we don't re-use an ID
    # The general algorithm for remapping from new IDs to old IDs is:
    #  1. If there is a user-specified remapping, use that above all else
    #  2. Elif the new error message hits in orig_full_msg_dict then use that ID
    #  3. Elif the new error message hits orig_no_link_msg_dict then
    #     a. If only a single ID, use it
    #     b. Elif multiple IDs & one matches last used ID in sequence, use it
    #     c. Else assign a new ID and flag for manual remapping
    #  4. Elif the new error message hits orig_core_msg_dict then
    #     a. If only a single ID, use it
    #     b. Elif multiple IDs & one matches last used ID in sequence, use it
    #     c. Else assign a new ID and flag for manual remapping
    #  5. Else - No matches, use a new ID
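    # A small, purely hypothetical illustration of step 3b above: if old IDs 00123 and 00456 both map to
    # the same API,no-link message and the last remapped ID was 00122, then 00123 is preferred because it
    # continues the sequence, leaving 00456 available for a later message in the same group.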
    def compareDB(self, orig_error_msg_dict, max_id):
        """Compare orig database dict to new dict, report out findings, and return potential new dict for parsed spec"""
        # First create reverse dicts of err_strings to IDs
        next_id = max_id + 1
        ids_parsed = 0
        mapped_enums = set() # store all enums that have been mapped to avoid re-use
        # Create an updated dict in-place that will be assigned to self.val_error_dict when done
        updated_val_error_dict = {}
        # Create a few separate mappings of error msg formats to associated ID(s)
        for enum in orig_error_msg_dict:
            api = self.error_db_dict[enum]['api']
            original_full_msg = orig_error_msg_dict[enum]
            orig_no_link_msg = "%s,%s" % (api, original_full_msg.split('(https', 1)[0])
            orig_core_msg = "%s,%s" % (api, orig_no_link_msg.split(' which states ', 1)[-1])
            orig_core_msg_period = "%s.' " % (orig_core_msg[:-2])
            print ("Orig core msg:%s\nOrig cw/o per:%s" % (orig_core_msg, orig_core_msg_period))

            # First store mapping of full error msg to ID, shouldn't have duplicates
            if original_full_msg in self.orig_full_msg_dict:
                print ("ERROR: Found duplicate full msg in original full error messages: %s" % (original_full_msg))
            self.orig_full_msg_dict[original_full_msg] = enum
            # Now map API,no_link_msg to list of IDs
            if orig_no_link_msg in self.orig_no_link_msg_dict:
                self.orig_no_link_msg_dict[orig_no_link_msg].append(enum)
            else:
                self.orig_no_link_msg_dict[orig_no_link_msg] = [enum]
            # Finally map API,core_msg to list of IDs
            if orig_core_msg in self.orig_core_msg_dict:
                self.orig_core_msg_dict[orig_core_msg].append(enum)
            else:
                self.orig_core_msg_dict[orig_core_msg] = [enum]
            if orig_core_msg_period in self.orig_core_msg_dict:
                self.orig_core_msg_dict[orig_core_msg_period].append(enum)
                print ("Added msg '%s' w/ enum %s to orig_core_msg_dict" % (orig_core_msg_period, enum))
            else:
                print ("Added msg '%s' w/ enum %s to orig_core_msg_dict" % (orig_core_msg_period, enum))
                self.orig_core_msg_dict[orig_core_msg_period] = [enum]
            # Also capture all enums that have a test and/or implementation
            if self.error_db_dict[enum]['check_implemented'] == 'Y' or self.error_db_dict[enum]['testname'] not in ['None','Unknown','NotTestable']:
                print ("Recording %s with implemented value %s and testname %s" % (enum, self.error_db_dict[enum]['check_implemented'], self.error_db_dict[enum]['testname']))
                self.orig_test_imp_enums.add(enum)
        # Values to be used for the update dict
        update_enum = ''
        update_msg = ''
        update_api = ''
        # Now parse through new dict and figure out what to do with non-matching things
        for enum in sorted(self.val_error_dict):
            ids_parsed = ids_parsed + 1
            enum_list = enum.split('_') # grab sections of enum for use below
            # Default update values to be the same
            update_enum = enum
            update_msg = self.val_error_dict[enum]['error_msg']
            update_api = self.val_error_dict[enum]['api']
            implicit = self.val_error_dict[enum]['implicit']
            vuid_string = self.val_error_dict[enum]['vuid_string']
            new_full_msg = update_msg
            new_no_link_msg = "%s,%s" % (update_api, new_full_msg.split('(https', 1)[0])
            new_core_msg = "%s,%s" % (update_api, new_no_link_msg.split(' which states ', 1)[-1])
            # Any user-forced remap takes precedence
            if enum_list[-1] in remap_dict:
                enum_list[-1] = remap_dict[enum_list[-1]]
                self.last_mapped_id = int(enum_list[-1])
                new_enum = "_".join(enum_list)
                print ("NOTE: Using user-supplied remap to force %s to be %s" % (enum, new_enum))
                mapped_enums = self._updateMappedEnum(mapped_enums, new_enum)
                update_enum = new_enum
            elif new_full_msg in self.orig_full_msg_dict:
                orig_enum = self.orig_full_msg_dict[new_full_msg]
                print ("Found exact match for full error msg so switching new ID %s to original ID %s" % (enum, orig_enum))
                mapped_enums = self._updateMappedEnum(mapped_enums, orig_enum)
                update_enum = orig_enum
            elif new_no_link_msg in self.orig_no_link_msg_dict:
                # Try to get single ID to map to from no_link matches
                if len(self.orig_no_link_msg_dict[new_no_link_msg]) == 1: # Only 1 id, use it!
                    orig_enum = self.orig_no_link_msg_dict[new_no_link_msg][0]
                    print ("Found no-link err msg match w/ only 1 ID match so switching new ID %s to original ID %s" % (enum, orig_enum))
                    mapped_enums = self._updateMappedEnum(mapped_enums, orig_enum)
                    update_enum = orig_enum
                else:
                    if self.findSeqID(self.orig_no_link_msg_dict[new_no_link_msg]): # If we have an id in sequence, use it!
                        (mapped_enums, update_enum) = self.useSeqID(self.orig_no_link_msg_dict[new_no_link_msg], mapped_enums)
                        print ("Found no-link err msg match w/ seq ID match so switching new ID %s to original ID %s" % (enum, update_enum))
                    else:
                        enum_list[-1] = "%05d" % (next_id)
                        new_enum = "_".join(enum_list)
                        next_id = next_id + 1
                        print ("Found no-link msg match but have multiple matched IDs w/o a sequence ID, updating ID %s to unique ID %s for msg %s" % (enum, new_enum, new_no_link_msg))
                        update_enum = new_enum
            elif new_core_msg in self.orig_core_msg_dict:
                # Do similar stuff here
                if len(self.orig_core_msg_dict[new_core_msg]) == 1:
                    orig_enum = self.orig_core_msg_dict[new_core_msg][0]
                    print ("Found core err msg match w/ only 1 ID match so switching new ID %s to original ID %s" % (enum, orig_enum))
                    mapped_enums = self._updateMappedEnum(mapped_enums, orig_enum)
                    update_enum = orig_enum
                else:
                    if self.findSeqID(self.orig_core_msg_dict[new_core_msg]):
                        (mapped_enums, update_enum) = self.useSeqID(self.orig_core_msg_dict[new_core_msg], mapped_enums)
                        print ("Found core err msg match w/ seq ID match so switching new ID %s to original ID %s" % (enum, update_enum))
                    else:
                        enum_list[-1] = "%05d" % (next_id)
                        new_enum = "_".join(enum_list)
                        next_id = next_id + 1
                        print ("Found core msg match but have multiple matched IDs w/o a sequence ID, updating ID %s to unique ID %s for msg %s" % (enum, new_enum, new_no_link_msg))
                        update_enum = new_enum
            # This seems to be a new error so need to pick it up from end of original unique ids & flag for review
            else:
                enum_list[-1] = "%05d" % (next_id)
                new_enum = "_".join(enum_list)
                next_id = next_id + 1
                print ("Completely new id and error code, update new id from %s to unique %s for core message:%s" % (enum, new_enum, new_core_msg))
                update_enum = new_enum
            if update_enum in updated_val_error_dict:
                print ("ERROR: About to OVERWRITE entry for %s" % update_enum)
            updated_val_error_dict[update_enum] = {}
            updated_val_error_dict[update_enum]['error_msg'] = update_msg
            updated_val_error_dict[update_enum]['api'] = update_api
            updated_val_error_dict[update_enum]['implicit'] = implicit
            updated_val_error_dict[update_enum]['vuid_string'] = vuid_string
        # Assign parsed dict to be the updated dict based on db compare
        print ("In compareDB parsed %d entries" % (ids_parsed))
        return updated_val_error_dict
Tobin Ehlis | a55b1d4 | 2017-04-04 12:23:48 -0600 | [diff] [blame] | 673 | |
Tobin Ehlis | 3c37fb3 | 2017-05-24 09:31:13 -0600 | [diff] [blame^] | 674 | def migrateIDs(self): |
| 675 | """Using the error db dict map from old IDs to new IDs and then update source w/ the mappings""" |
| 676 | #First create a mapping of old ID to new ID |
| 677 | old_to_new_id_map = {} |
| 678 | new_to_old_map = {} # Reverse sort according to new IDs to avoid start of a new ID aliasing an old ID and getting replaced |
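# Illustrative (hypothetical) entries, assuming validation_error_enum_name is
# 'VALIDATION_ERROR_' and convertVUID() yields an 8-digit hex number for a VUID string:
#   old_to_new_id_map['VALIDATION_ERROR_00621'] = 'VALIDATION_ERROR_09e007b6'
#   new_to_old_map['VALIDATION_ERROR_09e007b6'] = 'VALIDATION_ERROR_00621'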
| 679 | for enum in self.error_db_dict: |
| 680 | vuid_string = self.error_db_dict[enum]['vuid_string'] |
| 681 | if vuid_string in self.json_db: |
| 682 | new_enum = "%s%s" % (validation_error_enum_name, get8digithex(self.json_db[vuid_string]['number_vuid'])) |
| 683 | else: |
| 684 | new_enum = "%s%s" % (validation_error_enum_name, get8digithex(vuid_mapping.convertVUID(vuid_string))) |
| 685 | old_to_new_id_map[enum] = new_enum |
| 686 | new_to_old_map[new_enum] = enum |
| 687 | for enum in sorted(old_to_new_id_map): |
| 688 | print ("ID mapping old:new = %s:%s" % (enum, old_to_new_id_map[enum])) |
| 689 | # Create a data struct of old->new ids based on new ids so we don't double-replace any ids |
| 690 | #new_id_sorted_list = sorted(old_to_new_id_map, key=operator.itemgetter(1)) |
| 691 | #print ("NEW ID SORTED LIST:\n%s" % (new_id_sorted_list)) |
| 692 | #sys.exit() |
| 693 | layer_source_files = ['vk_validation_stats.py','../tests/layer_validation_tests.cpp','swapchain.cpp','core_validation.cpp','core_validation_types.h','descriptor_sets.h','descriptor_sets.cpp','parameter_validation.cpp','unique_objects.cpp','object_tracker.cpp','buffer_validation.h','buffer_validation.cpp'] |
| 694 | # For each file in source file list |
| 695 | for source_file in layer_source_files: |
| 696 | with open(source_file, 'r+') as f: |
| 697 | # Read in the file |
| 698 | src_txt = f.read() |
| 699 | # For each old id, replace it with the new id |
| 700 | for n_enum in reversed(sorted(new_to_old_map)): |
| 701 | src_txt = re.sub(new_to_old_map[n_enum], n_enum, src_txt) |
| 702 | f.seek(0) |
| 703 | # Write the updated contents back to the file and truncate any leftover bytes
| 704 | f.write(src_txt) |
| 705 | f.truncate() |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 706 | def validateUpdateDict(self, update_dict): |
| 707 | """Compare original dict vs. update dict and make sure that all of the checks are still there""" |
| 708 | # Currently just verifies that the updated dict contains the same number of checks as the original
| 709 | #orig_ids = {} |
| 710 | orig_id_count = len(self.val_error_dict) |
| 711 | #update_ids = {} |
| 712 | update_id_count = len(update_dict) |
| 713 | if orig_id_count != update_id_count: |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 714 | print ("Original dict had %d unique_ids, but updated dict has %d!" % (orig_id_count, update_id_count)) |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 715 | return False |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 716 | print ("Original dict and updated dict both have %d unique_ids. Great!" % (orig_id_count)) |
Tobin Ehlis | a55b1d4 | 2017-04-04 12:23:48 -0600 | [diff] [blame] | 717 | # Now flag any original dict enums that had tests and/or checks that are missing from updated |
| 718 | for enum in update_dict: |
| 719 | if enum in self.orig_test_imp_enums: |
| 720 | self.orig_test_imp_enums.remove(enum) |
| 721 | if len(self.orig_test_imp_enums) > 0: |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 722 | print ("TODO: Some enums with tests and/or checks implemented are missing from the update:")
Tobin Ehlis | a55b1d4 | 2017-04-04 12:23:48 -0600 | [diff] [blame] | 723 | for enum in sorted(self.orig_test_imp_enums): |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 724 | print ("\t%s" % enum)
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 725 | return True |
| 726 | # TODO : include some more analysis |
| 727 | |
| 728 | # User passes in arg of form <new_id1>-<old_id1>[,count1]:<new_id2>-<old_id2>[,count2]:... |
| 729 | # new_id# = the new enum id that was assigned to an error |
| 730 | # old_id# = the previous enum id that was assigned to the same error |
| 731 | # [,count#] = The number of ids to remap starting at new_id#=old_id# and ending at new_id[#+count#-1]=old_id[#+count#-1] |
| 732 | # If not supplied, then ,1 is assumed, which will only update a single id |
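# For example (id values below are purely illustrative), the argument
#   '-remap 02100-00542,3:02200-00610'
# would populate remap_dict as:
#   {'02100': '00542', '02101': '00543', '02102': '00544', '02200': '00610'}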
| 733 | def updateRemapDict(remap_string): |
| 734 | """Set up global remap_dict based on user input""" |
| 735 | remap_list = remap_string.split(":") |
| 736 | for rmap in remap_list: |
| 737 | count = 1 # Default count if none supplied |
| 738 | id_count_list = rmap.split(',') |
| 739 | if len(id_count_list) > 1: |
| 740 | count = int(id_count_list[1]) |
| 741 | new_old_id_list = id_count_list[0].split('-') |
| 742 | for offset in range(count): |
| 743 | remap_dict["%05d" % (int(new_old_id_list[0]) + offset)] = "%05d" % (int(new_old_id_list[1]) + offset) |
| 744 | for new_id in sorted(remap_dict): |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 745 | print ("Set to remap new id %s to old id %s" % (new_id, remap_dict[new_id])) |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 746 | |
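# Example invocations (illustrative; see printHelp() for the full option list):
#   python spec.py -gendb                                    # parse spec, write default database file and header
#   python spec.py -compare vk_validation_error_database.txt
#   python spec.py -spec vkspec.html -update -remap 02100-00542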
| 747 | if __name__ == "__main__": |
| 748 | i = 1 |
| 749 | use_online = True # Attempt to grab spec from online by default |
| 750 | update_option = False |
| 751 | while (i < len(sys.argv)): |
| 752 | arg = sys.argv[i] |
| 753 | i = i + 1 |
| 754 | if (arg == '-spec'): |
| 755 | spec_filename = sys.argv[i] |
| 756 | # If user specifies local specfile, skip online |
| 757 | use_online = False |
| 758 | i = i + 1 |
Tobin Ehlis | 828781a | 2017-05-23 15:23:40 -0600 | [diff] [blame] | 759 | elif (arg == '-json-file'): |
Tobin Ehlis | 98d109a | 2017-05-11 14:42:38 -0600 | [diff] [blame] | 760 | json_filename = sys.argv[i] |
| 761 | i = i + 1 |
Tobin Ehlis | 828781a | 2017-05-23 15:23:40 -0600 | [diff] [blame] | 762 | elif (arg == '-json'): |
| 763 | read_json = True |
Tobin Ehlis | 2c93213 | 2017-05-19 16:32:15 -0600 | [diff] [blame] | 764 | elif (arg == '-json-compare'): |
| 765 | json_compare = True |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 766 | elif (arg == '-out'): |
| 767 | out_filename = sys.argv[i] |
| 768 | i = i + 1 |
| 769 | elif (arg == '-gendb'): |
| 770 | gen_db = True |
| 771 | # Set filename if supplied, else use default |
| 772 | if i < len(sys.argv) and not sys.argv[i].startswith('-'): |
| 773 | db_filename = sys.argv[i] |
| 774 | i = i + 1 |
| 775 | elif (arg == '-compare'): |
| 776 | db_filename = sys.argv[i] |
| 777 | spec_compare = True |
| 778 | i = i + 1 |
| 779 | elif (arg == '-update'): |
| 780 | update_option = True |
| 781 | spec_compare = True |
| 782 | gen_db = True |
| 783 | elif (arg == '-remap'): |
| 784 | updateRemapDict(sys.argv[i]) |
| 785 | i = i + 1 |
Tobin Ehlis | 3c37fb3 | 2017-05-24 09:31:13 -0600 | [diff] [blame^] | 786 | elif (arg == '-migrate'): |
| 787 | migrate_ids = True |
| 788 | read_json = True |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 789 | elif (arg in ['-help', '-h']): |
| 790 | printHelp() |
| 791 | sys.exit() |
| 792 | if len(remap_dict) > 0 and not update_option:
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 793 | print ("ERROR: '-remap' option can only be used along with '-update' option. Exiting.") |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 794 | sys.exit() |
| 795 | spec = Specification() |
Tobin Ehlis | 2c93213 | 2017-05-19 16:32:15 -0600 | [diff] [blame] | 796 | spec.soupLoadFile(use_online, spec_filename) |
| 797 | spec.analyze() |
Tobin Ehlis | 828781a | 2017-05-23 15:23:40 -0600 | [diff] [blame] | 798 | if read_json: |
| 799 | spec.readJSON() |
Tobin Ehlis | 98d109a | 2017-05-11 14:42:38 -0600 | [diff] [blame] | 800 | spec.parseJSON() |
Tobin Ehlis | 2c93213 | 2017-05-19 16:32:15 -0600 | [diff] [blame] | 801 | #sys.exit() |
| 802 | if (json_compare): |
| 803 | # Read in current spec info from db file |
| 804 | (orig_err_msg_dict, max_id) = spec.readDB(db_filename) |
| 805 | spec.compareJSON() |
| 806 | print ("Found %d missing db entries in json db" % (spec.json_missing)) |
| 807 | print ("Found %d duplicate json entries" % (spec.duplicate_json_key_count)) |
Tobin Ehlis | 828781a | 2017-05-23 15:23:40 -0600 | [diff] [blame] | 808 | spec.genDB("json_vk_validation_error_database.txt") |
Tobin Ehlis | 98d109a | 2017-05-11 14:42:38 -0600 | [diff] [blame] | 809 | sys.exit() |
Tobin Ehlis | 3c37fb3 | 2017-05-24 09:31:13 -0600 | [diff] [blame^] | 810 | if (migrate_ids): |
| 811 | # Updated spec is already read into spec |
| 812 | # Read in existing error ids from database |
| 813 | (db_err_msg_dict, max_id) = spec.readDB(db_filename) |
| 814 | spec.migrateIDs() |
| 815 | spec.val_error_dict = spec.error_db_dict |
| 816 | #spec.genDB(db_filename) |
| 817 | spec.genHeader(out_filename) |
| 818 | sys.exit() |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 819 | if (spec_compare): |
| 820 | # Read in old spec info from db file |
Tobin Ehlis | e7560e7 | 2016-10-19 15:59:38 -0600 | [diff] [blame] | 821 | (orig_err_msg_dict, max_id) = spec.readDB(db_filename) |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 822 | # New spec data should already be read into spec.val_error_dict
Tobin Ehlis | e7560e7 | 2016-10-19 15:59:38 -0600 | [diff] [blame] | 823 | updated_dict = spec.compareDB(orig_err_msg_dict, max_id) |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 824 | update_valid = spec.validateUpdateDict(updated_dict) |
| 825 | if update_valid: |
| 826 | spec.updateDict(updated_dict) |
| 827 | else: |
| 828 | sys.exit() |
| 829 | if (gen_db): |
| 830 | spec.genDB(db_filename) |
Tobin Ehlis | 3198ba3 | 2017-04-19 17:30:52 -0600 | [diff] [blame] | 831 | print ("Writing out file (-out) to '%s'" % (out_filename)) |
Tobin Ehlis | 5ade069 | 2016-10-05 17:18:15 -0600 | [diff] [blame] | 832 | spec.genHeader(out_filename) |
| 833 | |
| 834 | ##### Example dataset |
| 835 | # <div class="sidebar"> |
| 836 | # <div class="titlepage"> |
| 837 | # <div> |
| 838 | # <div> |
| 839 | # <p class="title"> |
| 840 | # <strong>Valid Usage</strong> # When we reach this element, we know we're inside a sidebar of interest
| 841 | # </p> |
| 842 | # </div> |
| 843 | # </div> |
| 844 | # </div> |
| 845 | # <div class="itemizedlist"> |
| 846 | # <ul class="itemizedlist" style="list-style-type: disc; "> |
| 847 | # <li class="listitem"> |
| 848 | # <em class="parameter"> |
| 849 | # <code>device</code> |
| 850 | # </em> |
| 851 | # <span class="normative">must</span> be a valid |
| 852 | # <code class="code">VkDevice</code> handle |
| 853 | # </li> |
| 854 | # <li class="listitem"> |
| 855 | # <em class="parameter"> |
| 856 | # <code>commandPool</code> |
| 857 | # </em> |
| 858 | # <span class="normative">must</span> be a valid |
| 859 | # <code class="code">VkCommandPool</code> handle |
| 860 | # </li> |
| 861 | # <li class="listitem"> |
| 862 | # <em class="parameter"> |
| 863 | # <code>flags</code> |
| 864 | # </em> |
| 865 | # <span class="normative">must</span> be a valid combination of |
| 866 | # <code class="code"> |
| 867 | # <a class="link" href="#VkCommandPoolResetFlagBits">VkCommandPoolResetFlagBits</a> |
| 868 | # </code> values |
| 869 | # </li> |
| 870 | # <li class="listitem"> |
| 871 | # <em class="parameter"> |
| 872 | # <code>commandPool</code> |
| 873 | # </em> |
| 874 | # <span class="normative">must</span> have been created, allocated, or retrieved from |
| 875 | # <em class="parameter"> |
| 876 | # <code>device</code> |
| 877 | # </em> |
| 878 | # </li> |
| 879 | # <li class="listitem">All |
| 880 | # <code class="code">VkCommandBuffer</code> |
| 881 | # objects allocated from |
| 882 | # <em class="parameter"> |
| 883 | # <code>commandPool</code> |
| 884 | # </em> |
| 885 | # <span class="normative">must</span> not currently be pending execution |
| 886 | # </li> |
| 887 | # </ul> |
| 888 | # </div> |
| 889 | # </div> |
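#############################
# Illustrative sketch only (not called anywhere in this script): one way the
# sidebar markup shown in these example datasets could be walked with
# BeautifulSoup. The tag/class names simply mirror the sample markup and are
# an assumption about the xhtml layout, not the parser used in analyze().
def example_collect_valid_usage(soup):
    """Return a list of (sidebar title, [list item text]) for each Valid Usage sidebar."""
    results = []
    for sidebar in soup.find_all('div', class_='sidebar'):
        title_tag = sidebar.find('strong')
        # Only 'Valid Usage' and 'Valid Usage (Implicit)' sidebars are of interest
        if title_tag is None or not title_tag.get_text().startswith('Valid Usage'):
            continue
        items = [li.get_text(' ', strip=True) for li in sidebar.find_all('li', class_='listitem')]
        results.append((title_tag.get_text(), items))
    return results
#############################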
| 890 | ##### Second example dataset |
| 891 | # <div class="sidebar"> |
| 892 | # <div class="titlepage"> |
| 893 | # <div> |
| 894 | # <div> |
| 895 | # <p class="title"> |
| 896 | # <strong>Valid Usage</strong> |
| 897 | # </p> |
| 898 | # </div> |
| 899 | # </div> |
| 900 | # </div> |
| 901 | # <div class="itemizedlist"> |
| 902 | # <ul class="itemizedlist" style="list-style-type: disc; "> |
| 903 | # <li class="listitem">The <em class="parameter"><code>queueFamilyIndex</code></em> member of any given element of <em class="parameter"><code>pQueueCreateInfos</code></em> <span class="normative">must</span> be unique within <em class="parameter"><code>pQueueCreateInfos</code></em> |
| 904 | # </li> |
| 905 | # </ul> |
| 906 | # </div> |
| 907 | # </div> |
| 908 | # <div class="sidebar"> |
| 909 | # <div class="titlepage"> |
| 910 | # <div> |
| 911 | # <div> |
| 912 | # <p class="title"> |
| 913 | # <strong>Valid Usage (Implicit)</strong> |
| 914 | # </p> |
| 915 | # </div> |
| 916 | # </div> |
| 917 | # </div> |
| 918 | # <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "><li class="listitem"> |
| 919 | #<em class="parameter"><code>sType</code></em> <span class="normative">must</span> be <code class="code">VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO</code> |
| 920 | #</li><li class="listitem"> |
| 921 | #<em class="parameter"><code>pNext</code></em> <span class="normative">must</span> be <code class="literal">NULL</code> |
| 922 | #</li><li class="listitem"> |
| 923 | #<em class="parameter"><code>flags</code></em> <span class="normative">must</span> be <code class="literal">0</code> |
| 924 | #</li><li class="listitem"> |
| 925 | #<em class="parameter"><code>pQueueCreateInfos</code></em> <span class="normative">must</span> be a pointer to an array of <em class="parameter"><code>queueCreateInfoCount</code></em> valid <code class="code">VkDeviceQueueCreateInfo</code> structures |
Mark Lobodzinski | 267a7cf | 2017-01-25 09:33:25 -0700 | [diff] [blame] | 926 | #</li> |