Improve D3D GetProgramBinary chipset validation
- Check chipset identifying info before trying to compile shaders
- Check device feature level when loading a binary
- Use the chipset VendorID/DeviceID etc. instead of the LUID so that
  program binaries remain valid across system reboots (see the sketch
  below)
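
For reference, the device-identifying data serialized at the front of the
binary could look roughly like the sketch below. The field names and types
are assumptions for illustration, not necessarily the exact DeviceIdentifier
struct ANGLE defines (UINT is the Windows typedef from <windows.h>):

    struct DeviceIdentifier
    {
        UINT VendorId;      // stable across reboots, unlike the adapter LUID
        UINT DeviceId;
        UINT SubSysId;
        UINT Revision;
        UINT FeatureLevel;  // lets load() reject binaries built for another feature level
    };

Because every field is a plain integer, the struct can be written with a
single writeBytes() call and validated on load with one memcmp(), as the
diff below does.
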
Change-Id: I88ba4543bb990956d1d8fb324abf9784d72950cd
Reviewed-on: https://chromium-review.googlesource.com/280428
Reviewed-by: Jamie Madill <jmadill@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
diff --git a/src/libANGLE/renderer/d3d/ProgramD3D.cpp b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
index afaec55..617080b 100644
--- a/src/libANGLE/renderer/d3d/ProgramD3D.cpp
+++ b/src/libANGLE/renderer/d3d/ProgramD3D.cpp
@@ -436,6 +436,16 @@
LinkResult ProgramD3D::load(gl::InfoLog &infoLog, gl::BinaryInputStream *stream)
{
+ DeviceIdentifier binaryDeviceIdentifier = { 0 };
+ stream->readBytes(reinterpret_cast<unsigned char*>(&binaryDeviceIdentifier), sizeof(DeviceIdentifier));
+
+ DeviceIdentifier identifier = mRenderer->getAdapterIdentifier();
+ if (memcmp(&identifier, &binaryDeviceIdentifier, sizeof(DeviceIdentifier)) != 0)
+ {
+ infoLog << "Invalid program binary: the device configuration has changed.";
+ return LinkResult(false, gl::Error(GL_NO_ERROR));
+ }
+
int compileFlags = stream->readInt<int>();
if (compileFlags != ANGLE_COMPILE_OPTIMIZATION_LEVEL)
{
@@ -678,16 +688,6 @@
stream->skip(geometryShaderSize);
}
- GUID binaryIdentifier = {0};
- stream->readBytes(reinterpret_cast<unsigned char*>(&binaryIdentifier), sizeof(GUID));
-
- GUID identifier = mRenderer->getAdapterIdentifier();
- if (memcmp(&identifier, &binaryIdentifier, sizeof(GUID)) != 0)
- {
- infoLog << "Invalid program binary.";
- return LinkResult(false, gl::Error(GL_NO_ERROR));
- }
-
initializeUniformStorage();
initAttributesByLayout();
@@ -696,6 +696,11 @@
gl::Error ProgramD3D::save(gl::BinaryOutputStream *stream)
{
+ // Output the DeviceIdentifier before we output any shader code.
+ // When we load the binary again later, we can validate the device identifier
+ // before trying to compile any HLSL.
+ DeviceIdentifier binaryIdentifier = mRenderer->getAdapterIdentifier();
+ stream->writeBytes(reinterpret_cast<unsigned char*>(&binaryIdentifier), sizeof(DeviceIdentifier));
+
stream->writeInt(ANGLE_COMPILE_OPTIMIZATION_LEVEL);
stream->writeInt(mShaderVersion);
@@ -849,9 +854,6 @@
stream->writeBytes(geometryBlob, geometryShaderSize);
}
- GUID binaryIdentifier = mRenderer->getAdapterIdentifier();
- stream->writeBytes(reinterpret_cast<unsigned char*>(&binaryIdentifier), sizeof(GUID));
-
return gl::Error(GL_NO_ERROR);
}