Timothy Liang | 057c390 | 2018-08-08 10:48:45 -0400 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2018 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrMtlPipelineState.h" |
| 9 | |
Timothy Liang | 6ed6396 | 2018-08-10 09:49:44 -0400 | [diff] [blame] | 10 | #include "GrContext.h" |
| 11 | #include "GrContextPriv.h" |
| 12 | #include "GrPipeline.h" |
| 13 | #include "GrRenderTarget.h" |
Ethan Nicholas | 0106351 | 2018-10-08 16:58:25 -0400 | [diff] [blame] | 14 | #include "GrRenderTargetPriv.h" |
Timothy Liang | 6ed6396 | 2018-08-10 09:49:44 -0400 | [diff] [blame] | 15 | #include "GrTexturePriv.h" |
Timothy Liang | 057c390 | 2018-08-08 10:48:45 -0400 | [diff] [blame] | 16 | #include "GrMtlBuffer.h" |
| 17 | #include "GrMtlGpu.h" |
Timothy Liang | 6ed6396 | 2018-08-10 09:49:44 -0400 | [diff] [blame] | 18 | #include "GrMtlSampler.h" |
| 19 | #include "GrMtlTexture.h" |
| 20 | #include "glsl/GrGLSLFragmentProcessor.h" |
| 21 | #include "glsl/GrGLSLGeometryProcessor.h" |
| 22 | #include "glsl/GrGLSLXferProcessor.h" |
Timothy Liang | 057c390 | 2018-08-08 10:48:45 -0400 | [diff] [blame] | 23 | |
// Captures the Metal texture and sampler-state objects needed to bind one
// texture/sampler pair to the fragment stage of a render command encoder.
GrMtlPipelineState::SamplerBindings::SamplerBindings(const GrSamplerState& state,
                                                     GrTexture* texture,
                                                     GrMtlGpu* gpu)
        : fTexture(static_cast<GrMtlTexture*>(texture)->mtlTexture()) {
    // TODO: use resource provider to get sampler.
    const auto maxMipLevel = texture->texturePriv().maxMipMapLevel();
    std::unique_ptr<GrMtlSampler> mtlSampler(GrMtlSampler::Create(gpu, state, maxMipLevel));
    // Only the MTLSamplerState handle is retained; the wrapper may be destroyed.
    fSampler = mtlSampler->mtlSamplerState();
}
| 33 | |
// Takes ownership of the uniform buffers and the generated GLSL processor
// objects; the MTLRenderPipelineState handle is retained by ARC.
GrMtlPipelineState::GrMtlPipelineState(
        GrMtlGpu* gpu,
        id<MTLRenderPipelineState> pipelineState,
        MTLPixelFormat pixelFormat,
        const GrGLSLBuiltinUniformHandles& builtinUniformHandles,
        const UniformInfoArray& uniforms,
        sk_sp<GrMtlBuffer> geometryUniformBuffer,
        sk_sp<GrMtlBuffer> fragmentUniformBuffer,
        uint32_t numSamplers,
        std::unique_ptr<GrGLSLPrimitiveProcessor> geometryProcessor,
        std::unique_ptr<GrGLSLXferProcessor> xferProcessor,
        std::unique_ptr<std::unique_ptr<GrGLSLFragmentProcessor>[]> fragmentProcessors,
        int fragmentProcessorCnt)
        : fGpu(gpu)
        , fPipelineState(pipelineState)
        , fPixelFormat(pixelFormat)
        , fBuiltinUniformHandles(builtinUniformHandles)
        , fGeometryUniformBuffer(std::move(geometryUniformBuffer))
        , fFragmentUniformBuffer(std::move(fragmentUniformBuffer))
        , fNumSamplers(numSamplers)
        , fGeometryProcessor(std::move(geometryProcessor))
        , fXferProcessor(std::move(xferProcessor))
        , fFragmentProcessors(std::move(fragmentProcessors))
        , fFragmentProcessorCnt(fragmentProcessorCnt)
        // fDataManager reads the *members* fGeometryUniformBuffer/fFragmentUniformBuffer,
        // which are valid here because members initialize in declaration order — the
        // sk_sp members must be declared before fDataManager for this to be safe.
        , fDataManager(uniforms, fGeometryUniformBuffer->size(),
                       fFragmentUniformBuffer->size()) {
    (void) fPixelFormat; // Suppress unused-var warning.
}
Timothy Liang | 6ed6396 | 2018-08-10 09:49:44 -0400 | [diff] [blame] | 62 | |
// Pushes per-draw uniform data and rebuilds the texture/sampler binding list for
// this pipeline. The order in which fSamplerBindings is populated below (primitive-
// processor samplers, then fragment-processor samplers in pipeline order, then the
// optional dst-texture sampler) must match the binding indices the shaders were
// compiled with — bind() assigns fragment texture/sampler slots by array position.
void GrMtlPipelineState::setData(const GrRenderTarget* renderTarget,
                                 GrSurfaceOrigin origin,
                                 const GrPrimitiveProcessor& primProc,
                                 const GrPipeline& pipeline,
                                 const GrTextureProxy* const primProcTextures[]) {
    // primProcTextures may only be null when the primitive processor samples nothing.
    SkASSERT(primProcTextures || !primProc.numTextureSamplers());

    this->setRenderTargetState(renderTarget, origin);
    fGeometryProcessor->setData(fDataManager, primProc,
                                GrFragmentProcessor::CoordTransformIter(pipeline));
    fSamplerBindings.reset();
    // 1) Primitive-processor samplers.
    for (int i = 0; i < primProc.numTextureSamplers(); ++i) {
        const auto& sampler = primProc.textureSampler(i);
        auto texture = static_cast<GrMtlTexture*>(primProcTextures[i]->peekTexture());
        fSamplerBindings.emplace_back(sampler.samplerState(), texture, fGpu);
    }

    // 2) Fragment-processor samplers. Walk the pipeline's FPs and the generated
    // GLSL FPs in lockstep; the two iterations must visit the same processors.
    GrFragmentProcessor::Iter iter(pipeline);
    GrGLSLFragmentProcessor::Iter glslIter(fFragmentProcessors.get(), fFragmentProcessorCnt);
    const GrFragmentProcessor* fp = iter.next();
    GrGLSLFragmentProcessor* glslFP = glslIter.next();
    while (fp && glslFP) {
        glslFP->setData(fDataManager, *fp);
        for (int i = 0; i < fp->numTextureSamplers(); ++i) {
            const auto& sampler = fp->textureSampler(i);
            fSamplerBindings.emplace_back(sampler.samplerState(), sampler.peekTexture(), fGpu);
        }
        fp = iter.next();
        glslFP = glslIter.next();
    }
    // Both iterators must be exhausted together, or the FP lists were mismatched.
    SkASSERT(!fp && !glslFP);

    {
        SkIPoint offset;
        GrTexture* dstTexture = pipeline.peekDstTexture(&offset);

        fXferProcessor->setData(fDataManager, pipeline.getXferProcessor(), dstTexture, offset);
    }

    // 3) Optional dst-copy texture used by the xfer processor for dst reads.
    if (GrTextureProxy* dstTextureProxy = pipeline.dstTextureProxy()) {
        fSamplerBindings.emplace_back(GrSamplerState::ClampNearest(),
                                      dstTextureProxy->peekTexture(),
                                      fGpu);
    }

    // The shader was compiled against exactly this many sampler slots.
    SkASSERT(fNumSamplers == fSamplerBindings.count());
    if (fGeometryUniformBuffer || fFragmentUniformBuffer) {
        fDataManager.uploadUniformBuffers(fGpu, fGeometryUniformBuffer.get(),
                                          fFragmentUniformBuffer.get());
    }

    if (pipeline.isStencilEnabled()) {
        SkASSERT(renderTarget->renderTargetPriv().getStencilAttachment());
        fStencil.reset(*pipeline.getUserStencil(), pipeline.hasStencilClip(),
                       renderTarget->renderTargetPriv().numStencilBits());
    }
}
| 120 | |
// Binds this pipeline's uniform buffers and texture/sampler pairs onto the given
// render command encoder. Must be called after setData() has populated
// fSamplerBindings and uploaded the uniform buffers.
void GrMtlPipelineState::bind(id<MTLRenderCommandEncoder> renderCmdEncoder) {
    if (fGeometryUniformBuffer) {
        [renderCmdEncoder setVertexBuffer: fGeometryUniformBuffer->mtlBuffer()
                                   offset: 0
                                  atIndex: GrMtlUniformHandler::kGeometryBinding];
    }
    if (fFragmentUniformBuffer) {
        [renderCmdEncoder setFragmentBuffer: fFragmentUniformBuffer->mtlBuffer()
                                     offset: 0
                                    atIndex: GrMtlUniformHandler::kFragBinding];
    }
    SkASSERT(fNumSamplers == fSamplerBindings.count());
    // Fragment texture/sampler slots are assigned by position in fSamplerBindings.
    for (int i = 0; i < fNumSamplers; ++i) {
        const auto& binding = fSamplerBindings[i];
        [renderCmdEncoder setFragmentTexture: binding.fTexture
                                     atIndex: i];
        [renderCmdEncoder setFragmentSamplerState: binding.fSampler
                                          atIndex: i];
    }
}
| 140 | |
// Updates the built-in uniforms that depend on the render target's size and
// origin, writing into fDataManager only when the cached state actually changed.
void GrMtlPipelineState::setRenderTargetState(const GrRenderTarget* rt, GrSurfaceOrigin origin) {
    // Load the RT height uniform if it is needed to y-flip gl_FragCoord.
    if (fBuiltinUniformHandles.fRTHeightUni.isValid() &&
        fRenderTargetState.fRenderTargetSize.fHeight != rt->height()) {
        fDataManager.set1f(fBuiltinUniformHandles.fRTHeightUni, SkIntToScalar(rt->height()));
    }

    // Recompute the RT adjustment vector only when size or origin changed.
    SkISize dimensions = SkISize::Make(rt->width(), rt->height());
    SkASSERT(fBuiltinUniformHandles.fRTAdjustmentUni.isValid());
    if (fRenderTargetState.fRenderTargetOrigin != origin ||
        fRenderTargetState.fRenderTargetSize != dimensions) {
        fRenderTargetState.fRenderTargetSize = dimensions;
        fRenderTargetState.fRenderTargetOrigin = origin;

        float rtAdjustmentVec[4];
        fRenderTargetState.getRTAdjustmentVec(rtAdjustmentVec);
        fDataManager.set4fv(fBuiltinUniformHandles.fRTAdjustmentUni, 1, rtAdjustmentVec);
    }
}
Timothy Liang | de0be80 | 2018-08-10 13:48:08 -0400 | [diff] [blame] | 162 | |
// Returns true if the blend coefficient reads the constant blend color
// (and therefore requires setBlendColor* on the encoder).
static bool blend_coeff_refs_constant(GrBlendCoeff coeff) {
    return kConstC_GrBlendCoeff  == coeff ||
           kIConstC_GrBlendCoeff == coeff ||
           kConstA_GrBlendCoeff  == coeff ||
           kIConstA_GrBlendCoeff == coeff;
}
| 174 | |
// Sets the encoder's constant blend color when the xfer processor's blend
// coefficients reference it. No-op when the blend doesn't use the constant.
void GrMtlPipelineState::setBlendConstants(id<MTLRenderCommandEncoder> renderCmdEncoder,
                                           GrPixelConfig config,
                                           const GrXferProcessor& xferProcessor) {
    if (!renderCmdEncoder) {
        return;
    }

    GrXferProcessor::BlendInfo blendInfo;
    xferProcessor.getBlendInfo(&blendInfo);
    if (!blend_coeff_refs_constant(blendInfo.fSrcBlend) &&
        !blend_coeff_refs_constant(blendInfo.fDstBlend)) {
        return;
    }

    // Swizzle the blend to match what the shader will output.
    const GrSwizzle& swizzle = fGpu->caps()->shaderCaps()->configOutputSwizzle(config);
    SkPMColor4f blendConst = swizzle.applyTo(blendInfo.fBlendConstant);

    [renderCmdEncoder setBlendColorRed: blendConst.fR
                                 green: blendConst.fG
                                  blue: blendConst.fB
                                 alpha: blendConst.fA];
}
Ethan Nicholas | 0106351 | 2018-10-08 16:58:25 -0400 | [diff] [blame] | 197 | |
// Translates a Skia stencil op into the corresponding Metal stencil operation.
// The switch is exhaustive over GrStencilOp, so no default/fallthrough return
// is needed (the compiler verifies exhaustiveness).
// NOTE: declared static for internal linkage, consistent with the other
// file-local helpers (e.g. blend_coeff_refs_constant) and the lower_snake_case
// naming convention used for file-statics in this file.
static MTLStencilOperation skia_stencil_op_to_mtl(GrStencilOp op) {
    switch (op) {
        case GrStencilOp::kKeep:
            return MTLStencilOperationKeep;
        case GrStencilOp::kZero:
            return MTLStencilOperationZero;
        case GrStencilOp::kReplace:
            return MTLStencilOperationReplace;
        case GrStencilOp::kInvert:
            return MTLStencilOperationInvert;
        case GrStencilOp::kIncWrap:
            return MTLStencilOperationIncrementWrap;
        case GrStencilOp::kDecWrap:
            return MTLStencilOperationDecrementWrap;
        case GrStencilOp::kIncClamp:
            return MTLStencilOperationIncrementClamp;
        case GrStencilOp::kDecClamp:
            return MTLStencilOperationDecrementClamp;
    }
}
| 218 | |
// Builds a Metal stencil descriptor from one face of Skia's stencil settings,
// mapping the test function, masks, and pass/fail operations.
// NOTE: declared static for internal linkage, consistent with the other
// file-local helpers in this file.
static MTLStencilDescriptor* skia_stencil_to_mtl(GrStencilSettings::Face face) {
    MTLStencilDescriptor* result = [[MTLStencilDescriptor alloc] init];
    switch (face.fTest) {
        case GrStencilTest::kAlways:
            result.stencilCompareFunction = MTLCompareFunctionAlways;
            break;
        case GrStencilTest::kNever:
            result.stencilCompareFunction = MTLCompareFunctionNever;
            break;
        case GrStencilTest::kGreater:
            result.stencilCompareFunction = MTLCompareFunctionGreater;
            break;
        case GrStencilTest::kGEqual:
            result.stencilCompareFunction = MTLCompareFunctionGreaterEqual;
            break;
        case GrStencilTest::kLess:
            result.stencilCompareFunction = MTLCompareFunctionLess;
            break;
        case GrStencilTest::kLEqual:
            result.stencilCompareFunction = MTLCompareFunctionLessEqual;
            break;
        case GrStencilTest::kEqual:
            result.stencilCompareFunction = MTLCompareFunctionEqual;
            break;
        case GrStencilTest::kNotEqual:
            result.stencilCompareFunction = MTLCompareFunctionNotEqual;
            break;
    }
    result.readMask = face.fTestMask;
    result.writeMask = face.fWriteMask;
    result.depthStencilPassOperation = skia_stencil_op_to_mtl(face.fPassOp);
    result.stencilFailureOperation = skia_stencil_op_to_mtl(face.fFailOp);
    // Note: depth comparison is left at its default (always pass); only the
    // stencil portion of the descriptor is configured here.
    return result;
}
| 253 | |
// Installs a depth/stencil state on the encoder reflecting fStencil. A freshly
// initialized MTLDepthStencilDescriptor has stencil testing disabled, so the
// disabled case simply uses the default descriptor.
void GrMtlPipelineState::setDepthStencilState(id<MTLRenderCommandEncoder> renderCmdEncoder) {
    MTLDepthStencilDescriptor* desc = [[MTLDepthStencilDescriptor alloc] init];
    if (!fStencil.isDisabled()) {
        desc.frontFaceStencil = skia_stencil_to_mtl(fStencil.front());
        if (fStencil.isTwoSided()) {
            desc.backFaceStencil = skia_stencil_to_mtl(fStencil.back());
            [renderCmdEncoder setStencilFrontReferenceValue:fStencil.front().fRef
                                         backReferenceValue:fStencil.back().fRef];
        } else {
            // One-sided: both faces share the same stencil configuration.
            desc.backFaceStencil = desc.frontFaceStencil;
            [renderCmdEncoder setStencilReferenceValue:fStencil.front().fRef];
        }
    }
    id<MTLDepthStencilState> state = [fGpu->device() newDepthStencilStateWithDescriptor:desc];
    [renderCmdEncoder setDepthStencilState:state];
}