/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 */
#ifndef VULKAN_SHADER_VALIDATION_H
#define VULKAN_SHADER_VALIDATION_H

#include <spirv_tools_commit_id.h>
#include "spirv-tools/optimizer.hpp"

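// Standard-library and Vulkan headers used by the definitions below. In the original tree these
// may already be provided transitively by the including translation unit; they are listed here
// so the header reads as self-contained.
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include <vulkan/vulkan.h>
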
// A forward iterator over SPIR-V instructions. Provides easy access to len, opcode, and content words
// without the caller needing to care too much about the physical SPIR-V module layout.
struct spirv_inst_iter {
    std::vector<uint32_t>::const_iterator zero;  // Start of the module's word stream
    std::vector<uint32_t>::const_iterator it;    // First word of the current instruction

    // Word count of the current instruction, packed in the high 16 bits of its first word.
    uint32_t len() {
        auto result = *it >> 16;
        assert(result > 0);
        return result;
    }

    // Opcode of the current instruction, packed in the low 16 bits of its first word.
    uint32_t opcode() { return *it & 0x0ffffu; }

    // Word n of the current instruction; word 0 is the length/opcode word itself.
    uint32_t const &word(unsigned n) {
        assert(n < len());
        return it[n];
    }

    uint32_t offset() { return (uint32_t)(it - zero); }

    spirv_inst_iter() {}

    spirv_inst_iter(std::vector<uint32_t>::const_iterator zero, std::vector<uint32_t>::const_iterator it) : zero(zero), it(it) {}

    bool operator==(spirv_inst_iter const &other) { return it == other.it; }

    bool operator!=(spirv_inst_iter const &other) { return it != other.it; }

    spirv_inst_iter operator++(int) {  // x++
        spirv_inst_iter ii = *this;
        it += len();
        return ii;
    }

    spirv_inst_iter operator++() {  // ++x
        it += len();
        return *this;
    }

    // The iterator and the value are the same thing.
    spirv_inst_iter &operator*() { return *this; }
    spirv_inst_iter const &operator*() const { return *this; }
};
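
// Illustrative sketch (not part of the layer) of the encoding assumption behind len()/opcode():
// per the SPIR-V physical layout, the first word of every instruction packs the word count in
// the high 16 bits and the opcode in the low 16 bits.
//
//   uint32_t first_word = 0x0004003d;             // hypothetical OpLoad: word count 4, opcode 61
//   uint32_t word_count = first_word >> 16;       // 4
//   uint32_t opcode     = first_word & 0x0ffffu;  // 61 == spv::OpLoad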

struct shader_module {
    // The SPIR-V binary itself
    std::vector<uint32_t> words;
    // A mapping of <id> to the offset of the first word of its defining instruction. This is useful because walking type
    // trees, constant expressions, etc. requires jumping all over the instruction stream.
    std::unordered_map<unsigned, unsigned> def_index;
    bool has_valid_spirv;
    VkShaderModule vk_shader_module;
    uint32_t gpu_validation_shader_id;

    // Run the flatten-decoration pass over the incoming binary so that decoration groups are replaced by
    // equivalent per-target decorations; fall back to the unmodified words if the pass fails.
    std::vector<uint32_t> PreprocessShaderBinary(uint32_t *src_binary, size_t binary_size, spv_target_env env) {
        spvtools::Optimizer optimizer(env);
        optimizer.RegisterPass(spvtools::CreateFlattenDecorationPass());
        std::vector<uint32_t> optimized_binary;
        auto result = optimizer.Run(src_binary, binary_size / sizeof(uint32_t), &optimized_binary);
        return (result ? optimized_binary : std::vector<uint32_t>(src_binary, src_binary + binary_size / sizeof(uint32_t)));
    }

    shader_module(VkShaderModuleCreateInfo const *pCreateInfo, VkShaderModule shaderModule, spv_target_env env,
                  uint32_t unique_shader_id)
        : words(PreprocessShaderBinary((uint32_t *)pCreateInfo->pCode, pCreateInfo->codeSize, env)),
          def_index(),
          has_valid_spirv(true),
          vk_shader_module(shaderModule),
          gpu_validation_shader_id(unique_shader_id) {
        BuildDefIndex();
    }

    shader_module() : has_valid_spirv(false), vk_shader_module(VK_NULL_HANDLE) {}

    // Expose begin() / end() to enable range-based for
    spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); }  // First insn, past the 5-word module header
    spirv_inst_iter end() const { return spirv_inst_iter(words.begin(), words.end()); }          // Just past last insn
    // Given an offset into the module, produce an iterator there.
    spirv_inst_iter at(unsigned offset) const { return spirv_inst_iter(words.begin(), words.begin() + offset); }

    // Gets an iterator to the definition of an id, or end() if the id has no definition in this module.
    spirv_inst_iter get_def(unsigned id) const {
        auto it = def_index.find(id);
        if (it == def_index.end()) {
            return end();
        }
        return at(it->second);
    }

    void BuildDefIndex();
};
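
// Illustrative usage (not part of the layer): walking a module's instructions with range-based for
// and resolving an operand <id> through get_def(). For OpTypePointer, word(3) is the pointee type <id>.
//
//   for (auto insn : module) {
//       if (insn.opcode() == spv::OpTypePointer) {
//           auto pointee_type = module.get_def(insn.word(3));
//           (void)pointee_type;
//       }
//   }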

class ValidationCache {
    // Hashes of shaders that have passed validation before, and can be skipped.
    // We don't store negative results, as we would have to also store what was
    // wrong with them; also, we expect they will get fixed, so we're less
    // likely to see them again.
    std::unordered_set<uint32_t> good_shader_hashes;
    ValidationCache() {}

   public:
    static VkValidationCacheEXT Create(VkValidationCacheCreateInfoEXT const *pCreateInfo) {
        auto cache = new ValidationCache();
        cache->Load(pCreateInfo);
        return VkValidationCacheEXT(cache);
    }

    void Load(VkValidationCacheCreateInfoEXT const *pCreateInfo) {
        const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE;
        auto size = headerSize;
        if (!pCreateInfo->pInitialData || pCreateInfo->initialDataSize < size) return;

        uint32_t const *data = (uint32_t const *)pCreateInfo->pInitialData;
        if (data[0] != size) return;
        if (data[1] != VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT) return;
        uint8_t expected_uuid[VK_UUID_SIZE];
        Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, expected_uuid);
        if (memcmp(&data[2], expected_uuid, VK_UUID_SIZE) != 0) return;  // different version

        data = (uint32_t const *)(reinterpret_cast<uint8_t const *>(data) + headerSize);

        for (; size < pCreateInfo->initialDataSize; data++, size += sizeof(uint32_t)) {
            good_shader_hashes.insert(*data);
        }
    }

    void Write(size_t *pDataSize, void *pData) {
        const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE;  // 4 bytes for header size + 4 bytes for version number + UUID
        if (!pData) {
            *pDataSize = headerSize + good_shader_hashes.size() * sizeof(uint32_t);
            return;
        }

        if (*pDataSize < headerSize) {
            *pDataSize = 0;
            return;  // Too small for even the header!
        }

        uint32_t *out = (uint32_t *)pData;
        size_t actualSize = headerSize;

        // Write the header
        *out++ = headerSize;
        *out++ = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT;
        Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, reinterpret_cast<uint8_t *>(out));
        out = (uint32_t *)(reinterpret_cast<uint8_t *>(out) + VK_UUID_SIZE);

        for (auto it = good_shader_hashes.begin(); it != good_shader_hashes.end() && actualSize < *pDataSize;
             it++, out++, actualSize += sizeof(uint32_t)) {
            *out = *it;
        }

        *pDataSize = actualSize;
    }

    void Merge(ValidationCache const *other) {
        good_shader_hashes.reserve(good_shader_hashes.size() + other->good_shader_hashes.size());
        for (auto h : other->good_shader_hashes) good_shader_hashes.insert(h);
    }

    static uint32_t MakeShaderHash(VkShaderModuleCreateInfo const *smci);

    bool Contains(uint32_t hash) { return good_shader_hashes.count(hash) != 0; }

    void Insert(uint32_t hash) { good_shader_hashes.insert(hash); }

   private:
    void Sha1ToVkUuid(const char *sha1_str, uint8_t uuid[VK_UUID_SIZE]) {
        // Convert sha1_str from a hex string to binary. We only need VK_UUID_SIZE bytes of
        // output, so pad with zeroes if the input string is shorter than that, and truncate
        // if it's longer.
        char padded_sha1_str[2 * VK_UUID_SIZE + 1] = {};
        strncpy(padded_sha1_str, sha1_str, 2 * VK_UUID_SIZE + 1);
        char byte_str[3] = {};
        for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
            byte_str[0] = padded_sha1_str[2 * i + 0];
            byte_str[1] = padded_sha1_str[2 * i + 1];
            uuid[i] = static_cast<uint8_t>(strtol(byte_str, NULL, 16));
        }
    }
};
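
// Illustrative layout of the blob produced by ValidationCache::Write() and consumed by Load()
// above (byte offsets assume a 4-byte uint32_t and a 16-byte VK_UUID_SIZE):
//
//   offset 0               : uint32_t headerSize                 (2 * sizeof(uint32_t) + VK_UUID_SIZE)
//   offset 4               : uint32_t version                    (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT)
//   offset 8               : uint8_t  uuid[VK_UUID_SIZE]         (derived from SPIRV_TOOLS_COMMIT_ID)
//   offset 8 + VK_UUID_SIZE: uint32_t good_shader_hashes[]       (one entry per previously validated shader)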

bool ValidateAndCapturePipelineShaderState(layer_data *dev_data, PIPELINE_STATE *pPipeline);
bool ValidateComputePipeline(layer_data *dev_data, PIPELINE_STATE *pPipeline);
bool ValidateRayTracingPipelineNV(layer_data *dev_data, PIPELINE_STATE *pipeline);
// A descriptor slot is identified by a (descriptor set, binding) pair.
typedef std::pair<unsigned, unsigned> descriptor_slot_t;
bool PreCallValidateCreateShaderModule(layer_data *dev_data, VkShaderModuleCreateInfo const *pCreateInfo, bool *is_spirv,
                                       bool *spirv_valid);

#endif  // VULKAN_SHADER_VALIDATION_H