/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "gl/GrGLPathRendering.h"
#include "gl/GrGLNameAllocator.h"
#include "gl/GrGLUtil.h"
#include "gl/GrGpuGL.h"

#include "GrGLPath.h"
#include "GrGLPathRange.h"
#include "GrGLPathRendering.h"

#define GL_CALL(X) GR_GL_CALL(fGpu->glInterface(), X)
#define GL_CALL_RET(RET, X) GR_GL_CALL_RET(fGpu->glInterface(), RET, X)

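// Maps GrPathRendering::PathTransformType values to the corresponding GL transform type
// enums. The static asserts below keep the table in sync with the enum ordering.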
static const GrGLenum gXformType2GLType[] = {
    GR_GL_NONE,
    GR_GL_TRANSLATE_X,
    GR_GL_TRANSLATE_Y,
    GR_GL_TRANSLATE_2D,
    GR_GL_TRANSPOSE_AFFINE_2D
};

GR_STATIC_ASSERT(0 == GrPathRendering::kNone_PathTransformType);
GR_STATIC_ASSERT(1 == GrPathRendering::kTranslateX_PathTransformType);
GR_STATIC_ASSERT(2 == GrPathRendering::kTranslateY_PathTransformType);
GR_STATIC_ASSERT(3 == GrPathRendering::kTranslate_PathTransformType);
GR_STATIC_ASSERT(4 == GrPathRendering::kAffine_PathTransformType);
GR_STATIC_ASSERT(GrPathRendering::kAffine_PathTransformType == GrPathRendering::kLast_PathTransformType);

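// Converts the stencil pass op used for path filling into the fill mode expected by the
// GL path rendering stencil calls.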
static GrGLenum gr_stencil_op_to_gl_path_rendering_fill_mode(GrStencilOp op) {
    switch (op) {
        default:
            SkFAIL("Unexpected path fill.");
            /* fallthrough */;
        case kIncClamp_StencilOp:
            return GR_GL_COUNT_UP;
        case kInvert_StencilOp:
            return GR_GL_INVERT;
    }
}

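// The constructor probes the GL interface for the optional "stencil then cover" and
// fragment-input-gen entry points so later calls can branch on cached capability flags,
// and sizes the path texgen state to the number of fixed-function texture coordinate sets.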
GrGLPathRendering::GrGLPathRendering(GrGpuGL* gpu)
    : fGpu(gpu) {
    const GrGLInterface* glInterface = gpu->glInterface();
    fCaps.stencilThenCoverSupport =
        NULL != glInterface->fFunctions.fStencilThenCoverFillPath &&
        NULL != glInterface->fFunctions.fStencilThenCoverStrokePath &&
        NULL != glInterface->fFunctions.fStencilThenCoverFillPathInstanced &&
        NULL != glInterface->fFunctions.fStencilThenCoverStrokePathInstanced;
    fCaps.fragmentInputGenSupport =
        NULL != glInterface->fFunctions.fProgramPathFragmentInputGen;
    fHWPathTexGenSettings.reset(fGpu->glCaps().maxFixedFunctionTextureCoords());
}

GrGLPathRendering::~GrGLPathRendering() {
}

void GrGLPathRendering::abandonGpuResources() {
    fPathNameAllocator.reset(NULL);
}

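// Drops all cached path rendering state after a context reset: reload an identity
// modelview matrix, disable every path texgen unit, and invalidate the cached projection
// and stencil settings.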
void GrGLPathRendering::resetContext() {
    fHWProjectionMatrixState.invalidate();
    // we don't use the model view matrix.
    GL_CALL(MatrixLoadIdentity(GR_GL_MODELVIEW));

    for (int i = 0; i < fGpu->glCaps().maxFixedFunctionTextureCoords(); ++i) {
        GL_CALL(PathTexGen(GR_GL_TEXTURE0 + i, GR_GL_NONE, 0, NULL));
        fHWPathTexGenSettings[i].fMode = GR_GL_NONE;
        fHWPathTexGenSettings[i].fNumComponents = 0;
    }
    fHWActivePathTexGenSets = 0;
    fHWPathStencilSettings.invalidate();
}

GrPath* GrGLPathRendering::createPath(const SkPath& inPath, const SkStrokeRec& stroke) {
    return SkNEW_ARGS(GrGLPath, (fGpu, inPath, stroke));
}

GrPathRange* GrGLPathRendering::createPathRange(size_t size, const SkStrokeRec& stroke) {
    return SkNEW_ARGS(GrGLPathRange, (fGpu, size, stroke));
}

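// Renders the path into the stencil buffer only; the cover step is left to the caller.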
void GrGLPathRendering::stencilPath(const GrPath* path, SkPath::FillType fill) {
    GrGLuint id = static_cast<const GrGLPath*>(path)->pathID();
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget());
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget()->getStencilBuffer());

    this->flushPathStencilSettings(fill);
    SkASSERT(!fHWPathStencilSettings.isTwoSided());

    GrGLenum fillMode =
        gr_stencil_op_to_gl_path_rendering_fill_mode(fHWPathStencilSettings.passOp(GrStencilSettings::kFront_Face));
    GrGLint writeMask = fHWPathStencilSettings.writeMask(GrStencilSettings::kFront_Face);
    GL_CALL(StencilFillPath(id, fillMode, writeMask));
}

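// Draws a single path with stencil-then-cover. Inverse fills are handled by stenciling
// the path and then covering it with a rect over the render target (outset slightly to
// absorb view-matrix inversion error).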
void GrGLPathRendering::drawPath(const GrPath* path, SkPath::FillType fill) {
    GrGLuint id = static_cast<const GrGLPath*>(path)->pathID();
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget());
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget()->getStencilBuffer());
    SkASSERT(!fGpu->fCurrentProgram->hasVertexShader());

    this->flushPathStencilSettings(fill);
    SkASSERT(!fHWPathStencilSettings.isTwoSided());

    const SkStrokeRec& stroke = path->getStroke();

    SkPath::FillType nonInvertedFill = SkPath::ConvertToNonInverseFillType(fill);

    GrGLenum fillMode =
        gr_stencil_op_to_gl_path_rendering_fill_mode(fHWPathStencilSettings.passOp(GrStencilSettings::kFront_Face));
    GrGLint writeMask = fHWPathStencilSettings.writeMask(GrStencilSettings::kFront_Face);

    if (nonInvertedFill == fill) {
        if (stroke.needToApply()) {
            if (SkStrokeRec::kStrokeAndFill_Style == stroke.getStyle()) {
                GL_CALL(StencilFillPath(id, fillMode, writeMask));
            }
            this->stencilThenCoverStrokePath(id, 0xffff, writeMask, GR_GL_BOUNDING_BOX);
        } else {
            this->stencilThenCoverFillPath(id, fillMode, writeMask, GR_GL_BOUNDING_BOX);
        }
    } else {
        if (stroke.isFillStyle() || SkStrokeRec::kStrokeAndFill_Style == stroke.getStyle()) {
            GL_CALL(StencilFillPath(id, fillMode, writeMask));
        }
        if (stroke.needToApply()) {
            GL_CALL(StencilStrokePath(id, 0xffff, writeMask));
        }

        GrDrawState* drawState = fGpu->drawState();
        GrDrawState::AutoViewMatrixRestore avmr;
        SkRect bounds = SkRect::MakeLTRB(0, 0,
                                         SkIntToScalar(drawState->getRenderTarget()->width()),
                                         SkIntToScalar(drawState->getRenderTarget()->height()));
        SkMatrix vmi;
        // Mapping the rect through a perspective matrix may not be correct.
        if (!drawState->getViewMatrix().hasPerspective() && drawState->getViewInverse(&vmi)) {
            vmi.mapRect(&bounds);
            // We could theoretically set the bloat to 0, but keep it to guard against matrix
            // inversion precision error.
            SkScalar bloat = drawState->getViewMatrix().getMaxScale() * SK_ScalarHalf;
            bounds.outset(bloat, bloat);
        } else {
            avmr.setIdentity(drawState);
        }

        fGpu->drawSimpleRect(bounds);
    }
}

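// Instanced counterpart of drawPath: draws a run of paths from a GrGLPathRange using the
// shared index and transform arrays.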
void GrGLPathRendering::drawPaths(const GrPathRange* pathRange, const uint32_t indices[], int count,
                                  const float transforms[], PathTransformType transformsType,
                                  SkPath::FillType fill) {
    SkASSERT(fGpu->caps()->pathRenderingSupport());
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget());
    SkASSERT(NULL != fGpu->drawState()->getRenderTarget()->getStencilBuffer());
    SkASSERT(!fGpu->fCurrentProgram->hasVertexShader());

    GrGLuint baseID = static_cast<const GrGLPathRange*>(pathRange)->basePathID();

    this->flushPathStencilSettings(fill);
    SkASSERT(!fHWPathStencilSettings.isTwoSided());

    const SkStrokeRec& stroke = pathRange->getStroke();

    SkPath::FillType nonInvertedFill =
        SkPath::ConvertToNonInverseFillType(fill);

    GrGLenum fillMode =
        gr_stencil_op_to_gl_path_rendering_fill_mode(
            fHWPathStencilSettings.passOp(GrStencilSettings::kFront_Face));
    GrGLint writeMask =
        fHWPathStencilSettings.writeMask(GrStencilSettings::kFront_Face);

    if (nonInvertedFill == fill) {
        if (stroke.needToApply()) {
            if (SkStrokeRec::kStrokeAndFill_Style == stroke.getStyle()) {
                GL_CALL(StencilFillPathInstanced(
                        count, GR_GL_UNSIGNED_INT, indices, baseID, fillMode,
                        writeMask, gXformType2GLType[transformsType],
                        transforms));
            }
            this->stencilThenCoverStrokePathInstanced(
                    count, GR_GL_UNSIGNED_INT, indices, baseID, 0xffff, writeMask,
                    GR_GL_BOUNDING_BOX_OF_BOUNDING_BOXES,
                    gXformType2GLType[transformsType], transforms);
        } else {
            this->stencilThenCoverFillPathInstanced(
                    count, GR_GL_UNSIGNED_INT, indices, baseID, fillMode, writeMask,
                    GR_GL_BOUNDING_BOX_OF_BOUNDING_BOXES,
                    gXformType2GLType[transformsType], transforms);
        }
    } else {
        if (stroke.isFillStyle() || SkStrokeRec::kStrokeAndFill_Style == stroke.getStyle()) {
            GL_CALL(StencilFillPathInstanced(
                    count, GR_GL_UNSIGNED_INT, indices, baseID, fillMode,
                    writeMask, gXformType2GLType[transformsType],
                    transforms));
        }
        if (stroke.needToApply()) {
            GL_CALL(StencilStrokePathInstanced(
                    count, GR_GL_UNSIGNED_INT, indices, baseID, 0xffff,
                    writeMask, gXformType2GLType[transformsType],
                    transforms));
        }

        GrDrawState* drawState = fGpu->drawState();
        GrDrawState::AutoViewMatrixRestore avmr;
        SkRect bounds = SkRect::MakeLTRB(0, 0,
                                         SkIntToScalar(drawState->getRenderTarget()->width()),
                                         SkIntToScalar(drawState->getRenderTarget()->height()));
        SkMatrix vmi;
        // Mapping the rect through a perspective matrix may not be correct.
        if (!drawState->getViewMatrix().hasPerspective() && drawState->getViewInverse(&vmi)) {
            vmi.mapRect(&bounds);
            // We could theoretically set the bloat to 0, but keep it to guard against matrix
            // inversion precision error.
            SkScalar bloat = drawState->getViewMatrix().getMaxScale() * SK_ScalarHalf;
            bounds.outset(bloat, bloat);
        } else {
            avmr.setIdentity(drawState);
        }

        fGpu->drawSimpleRect(bounds);
    }
}

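// Programs object-linear path texgen for the given texture unit, skipping the GL call
// when the cached settings already match.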
void GrGLPathRendering::enablePathTexGen(int unitIdx, PathTexGenComponents components,
                                         const GrGLfloat* coefficients) {
    SkASSERT(components >= kS_PathTexGenComponents &&
             components <= kSTR_PathTexGenComponents);
    SkASSERT(fGpu->glCaps().maxFixedFunctionTextureCoords() >= unitIdx);

    if (GR_GL_OBJECT_LINEAR == fHWPathTexGenSettings[unitIdx].fMode &&
        components == fHWPathTexGenSettings[unitIdx].fNumComponents &&
        !memcmp(coefficients, fHWPathTexGenSettings[unitIdx].fCoefficients,
                3 * components * sizeof(GrGLfloat))) {
        return;
    }

    fGpu->setTextureUnit(unitIdx);

    fHWPathTexGenSettings[unitIdx].fNumComponents = components;
    GL_CALL(PathTexGen(GR_GL_TEXTURE0 + unitIdx, GR_GL_OBJECT_LINEAR, components, coefficients));

    memcpy(fHWPathTexGenSettings[unitIdx].fCoefficients, coefficients,
           3 * components * sizeof(GrGLfloat));
}

void GrGLPathRendering::enablePathTexGen(int unitIdx, PathTexGenComponents components,
                                         const SkMatrix& matrix) {
    GrGLfloat coefficients[3 * 3];
    SkASSERT(components >= kS_PathTexGenComponents &&
             components <= kSTR_PathTexGenComponents);

    coefficients[0] = SkScalarToFloat(matrix[SkMatrix::kMScaleX]);
    coefficients[1] = SkScalarToFloat(matrix[SkMatrix::kMSkewX]);
    coefficients[2] = SkScalarToFloat(matrix[SkMatrix::kMTransX]);

    if (components >= kST_PathTexGenComponents) {
        coefficients[3] = SkScalarToFloat(matrix[SkMatrix::kMSkewY]);
        coefficients[4] = SkScalarToFloat(matrix[SkMatrix::kMScaleY]);
        coefficients[5] = SkScalarToFloat(matrix[SkMatrix::kMTransY]);
    }

    if (components >= kSTR_PathTexGenComponents) {
        coefficients[6] = SkScalarToFloat(matrix[SkMatrix::kMPersp0]);
        coefficients[7] = SkScalarToFloat(matrix[SkMatrix::kMPersp1]);
        coefficients[8] = SkScalarToFloat(matrix[SkMatrix::kMPersp2]);
    }

    this->enablePathTexGen(unitIdx, components, coefficients);
}

void GrGLPathRendering::flushPathTexGenSettings(int numUsedTexCoordSets) {
    SkASSERT(fGpu->glCaps().maxFixedFunctionTextureCoords() >= numUsedTexCoordSets);

    // Only write the inactive path tex gens, since active path tex gens were
    // written when they were enabled.

    SkDEBUGCODE(
        for (int i = 0; i < numUsedTexCoordSets; i++) {
            SkASSERT(0 != fHWPathTexGenSettings[i].fNumComponents);
        }
    );

    for (int i = numUsedTexCoordSets; i < fHWActivePathTexGenSets; i++) {
        SkASSERT(0 != fHWPathTexGenSettings[i].fNumComponents);

        fGpu->setTextureUnit(i);
        GL_CALL(PathTexGen(GR_GL_TEXTURE0 + i, GR_GL_NONE, 0, NULL));
        fHWPathTexGenSettings[i].fNumComponents = 0;
    }

    fHWActivePathTexGenSets = numUsedTexCoordSets;
}

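// Unpacks an SkMatrix into a 3x3 coefficient array and feeds it to
// ProgramPathFragmentInputGen for the given program resource location.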
void GrGLPathRendering::setProgramPathFragmentInputTransform(GrGLuint program, GrGLint location,
                                                             GrGLenum genMode, GrGLint components,
                                                             const SkMatrix& matrix) {
    SkASSERT(caps().fragmentInputGenSupport);
    GrGLfloat coefficients[3 * 3];
    SkASSERT(components >= 1 && components <= 3);

    coefficients[0] = SkScalarToFloat(matrix[SkMatrix::kMScaleX]);
    coefficients[1] = SkScalarToFloat(matrix[SkMatrix::kMSkewX]);
    coefficients[2] = SkScalarToFloat(matrix[SkMatrix::kMTransX]);

    if (components >= 2) {
        coefficients[3] = SkScalarToFloat(matrix[SkMatrix::kMSkewY]);
        coefficients[4] = SkScalarToFloat(matrix[SkMatrix::kMScaleY]);
        coefficients[5] = SkScalarToFloat(matrix[SkMatrix::kMTransY]);
    }

    if (components >= 3) {
        coefficients[6] = SkScalarToFloat(matrix[SkMatrix::kMPersp0]);
        coefficients[7] = SkScalarToFloat(matrix[SkMatrix::kMPersp1]);
        coefficients[8] = SkScalarToFloat(matrix[SkMatrix::kMPersp2]);
    }

    GL_CALL(ProgramPathFragmentInputGen(program, location, genMode, components, coefficients));
}

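// Loads the render-target-adjusted view matrix into the GL projection matrix, skipping
// the call when the cached state already matches.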
void GrGLPathRendering::setProjectionMatrix(const SkMatrix& matrix,
                                            const SkISize& renderTargetSize,
                                            GrSurfaceOrigin renderTargetOrigin) {

    SkASSERT(fGpu->glCaps().pathRenderingSupport());

    if (renderTargetOrigin == fHWProjectionMatrixState.fRenderTargetOrigin &&
        renderTargetSize == fHWProjectionMatrixState.fRenderTargetSize &&
        matrix.cheapEqualTo(fHWProjectionMatrixState.fViewMatrix)) {
        return;
    }

    fHWProjectionMatrixState.fViewMatrix = matrix;
    fHWProjectionMatrixState.fRenderTargetSize = renderTargetSize;
    fHWProjectionMatrixState.fRenderTargetOrigin = renderTargetOrigin;

    GrGLfloat glMatrix[4 * 4];
    fHWProjectionMatrixState.getRTAdjustedGLMatrix<4>(glMatrix);
    GL_CALL(MatrixLoadf(GR_GL_PROJECTION, glMatrix));
}

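// Allocates path names. Single-name requests are served from a client-side pool
// (GrGLNameAllocator) to avoid a round trip to the GL server; range requests and pool
// exhaustion fall back to GenPaths. deletePaths below mirrors this logic.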
GrGLuint GrGLPathRendering::genPaths(GrGLsizei range) {
    if (range > 1) {
        GrGLuint name;
        GL_CALL_RET(name, GenPaths(range));
        return name;
    }

    if (NULL == fPathNameAllocator.get()) {
        static const int range = 65536;
        GrGLuint firstName;
        GL_CALL_RET(firstName, GenPaths(range));
        fPathNameAllocator.reset(SkNEW_ARGS(GrGLNameAllocator, (firstName, firstName + range)));
    }

    // When allocating names one at a time, pull from a client-side pool of
    // available names in order to save a round trip to the GL server.
    GrGLuint name = fPathNameAllocator->allocateName();

    if (0 == name) {
        // Our reserved path names are all in use. Fall back on GenPaths.
        GL_CALL_RET(name, GenPaths(1));
    }

    return name;
}

void GrGLPathRendering::deletePaths(GrGLuint path, GrGLsizei range) {
    if (range > 1) {
        // It is not supported to delete names in ranges that were allocated
        // individually using GrGLNameAllocator.
        SkASSERT(NULL == fPathNameAllocator.get() ||
                 path + range <= fPathNameAllocator->firstName() ||
                 path >= fPathNameAllocator->endName());
        GL_CALL(DeletePaths(path, range));
        return;
    }

    if (NULL == fPathNameAllocator.get() ||
        path < fPathNameAllocator->firstName() ||
        path >= fPathNameAllocator->endName()) {
        // If we aren't inside fPathNameAllocator's range then this name was
        // generated by the GenPaths fallback (or else was never allocated).
        GL_CALL(DeletePaths(path, 1));
        return;
    }

    // Make the path empty to save memory, but don't free the name in the driver.
    GL_CALL(PathCommands(path, 0, NULL, 0, GR_GL_FLOAT, NULL));
    fPathNameAllocator->free(path);
}

void GrGLPathRendering::flushPathStencilSettings(SkPath::FillType fill) {
    GrStencilSettings pathStencilSettings;
    fGpu->getPathStencilSettingsForFillType(fill, &pathStencilSettings);
    if (fHWPathStencilSettings != pathStencilSettings) {
        // Only the func, ref, and mask are set here. The op and write mask are params to the
        // call that draws the path to the stencil buffer (glStencilFillPath).
        GrGLenum func =
            GrToGLStencilFunc(pathStencilSettings.func(GrStencilSettings::kFront_Face));
        GL_CALL(PathStencilFunc(func, pathStencilSettings.funcRef(GrStencilSettings::kFront_Face),
                                pathStencilSettings.funcMask(GrStencilSettings::kFront_Face)));

        fHWPathStencilSettings = pathStencilSettings;
    }
}

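// The helpers below use the driver's combined StencilThenCover* calls when
// caps().stencilThenCoverSupport is set, and otherwise fall back to issuing separate
// stencil and cover calls.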
inline void GrGLPathRendering::stencilThenCoverFillPath(GrGLuint path, GrGLenum fillMode,
                                                        GrGLuint mask, GrGLenum coverMode) {
    if (caps().stencilThenCoverSupport) {
        GL_CALL(StencilThenCoverFillPath(path, fillMode, mask, coverMode));
        return;
    }
    GL_CALL(StencilFillPath(path, fillMode, mask));
    GL_CALL(CoverFillPath(path, coverMode));
}

inline void GrGLPathRendering::stencilThenCoverStrokePath(GrGLuint path, GrGLint reference,
                                                          GrGLuint mask, GrGLenum coverMode) {
    if (caps().stencilThenCoverSupport) {
        GL_CALL(StencilThenCoverStrokePath(path, reference, mask, coverMode));
        return;
    }
    GL_CALL(StencilStrokePath(path, reference, mask));
    GL_CALL(CoverStrokePath(path, coverMode));
}

inline void GrGLPathRendering::stencilThenCoverFillPathInstanced(
        GrGLsizei numPaths, GrGLenum pathNameType, const GrGLvoid *paths,
        GrGLuint pathBase, GrGLenum fillMode, GrGLuint mask, GrGLenum coverMode,
        GrGLenum transformType, const GrGLfloat *transformValues) {
    if (caps().stencilThenCoverSupport) {
        GL_CALL(StencilThenCoverFillPathInstanced(numPaths, pathNameType, paths, pathBase, fillMode,
                                                  mask, coverMode, transformType, transformValues));
        return;
    }
    GL_CALL(StencilFillPathInstanced(numPaths, pathNameType, paths, pathBase,
                                     fillMode, mask, transformType, transformValues));
    GL_CALL(CoverFillPathInstanced(numPaths, pathNameType, paths, pathBase,
                                   coverMode, transformType, transformValues));
}

inline void GrGLPathRendering::stencilThenCoverStrokePathInstanced(
        GrGLsizei numPaths, GrGLenum pathNameType, const GrGLvoid *paths,
        GrGLuint pathBase, GrGLint reference, GrGLuint mask, GrGLenum coverMode,
        GrGLenum transformType, const GrGLfloat *transformValues) {
    if (caps().stencilThenCoverSupport) {
        GL_CALL(StencilThenCoverStrokePathInstanced(numPaths, pathNameType, paths, pathBase,
                                                    reference, mask, coverMode, transformType,
                                                    transformValues));
        return;
    }

    GL_CALL(StencilStrokePathInstanced(numPaths, pathNameType, paths, pathBase,
                                       reference, mask, transformType, transformValues));
    GL_CALL(CoverStrokePathInstanced(numPaths, pathNameType, paths, pathBase,
                                     coverMode, transformType, transformValues));
}