/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkExecutor.h"
#include "include/gpu/GrContextOptions.h"
#include "tools/flags/CommonFlags.h"

#include <cstring>

DEFINE_int(gpuThreads,
           2,
           "Create this many extra threads to assist with GPU work, "
           "including software path rendering. Defaults to two.");

static DEFINE_bool(cachePathMasks, true,
                   "Allows path mask textures to be cached in GPU configs.");
static DEFINE_bool(allPathsVolatile, false,
                   "Causes all GPU paths to be processed as if 'setIsVolatile' had been called.");

static DEFINE_bool(gs, true, "Enables support for geometry shaders (if hw allows).");
static DEFINE_bool(ts, true, "Enables support for tessellation shaders (if hw allows).");

static DEFINE_int(maxTessellationSegments, 0,
                  "Overrides the max number of tessellation segments supported by the caps.");

static DEFINE_bool(cc, false, "Allow coverage counting shortcuts to render paths?");

static DEFINE_string(pr, "",
                     "Set of enabled gpu path renderers. Defined as a list of: "
                     "[~]none [~]dashline [~]tess [~]nvpr [~]ccpr [~]aahairline [~]aaconvex "
                     "[~]aalinearizing [~]small [~]tri [~]all");

static DEFINE_int(internalSamples, 4,
                  "Number of samples for internal draws that use MSAA or mixed samples.");

static DEFINE_bool(disableDriverCorrectnessWorkarounds, false,
                   "Disables all GPU driver correctness workarounds");

static DEFINE_bool(reduceOpsTaskSplitting, false, "Improve opsTask sorting");
static DEFINE_bool(dontReduceOpsTaskSplitting, false, "Allow more opsTask splitting");

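// Maps a single name from the --pr flag to its GpuPathRenderers bit; aborts
// on an unrecognized name.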
static GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
    if (!strcmp(name, "none")) {
        return GpuPathRenderers::kNone;
    } else if (!strcmp(name, "dashline")) {
        return GpuPathRenderers::kDashLine;
    } else if (!strcmp(name, "tess")) {
        return GpuPathRenderers::kTessellation;
    } else if (!strcmp(name, "nvpr")) {
        return GpuPathRenderers::kStencilAndCover;
    } else if (!strcmp(name, "ccpr")) {
        return GpuPathRenderers::kCoverageCounting;
    } else if (!strcmp(name, "aahairline")) {
        return GpuPathRenderers::kAAHairline;
    } else if (!strcmp(name, "aaconvex")) {
        return GpuPathRenderers::kAAConvex;
    } else if (!strcmp(name, "aalinearizing")) {
        return GpuPathRenderers::kAALinearizing;
    } else if (!strcmp(name, "small")) {
        return GpuPathRenderers::kSmall;
    } else if (!strcmp(name, "tri")) {
        return GpuPathRenderers::kTriangulating;
    } else if (!strcmp(name, "default")) {
        return GpuPathRenderers::kDefault;
    }
    SK_ABORT("error: unknown named path renderer \"%s\"\n", name);
}

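// Builds the GpuPathRenderers mask from the --pr flag. If the first entry is
// negated with '~', we start from the default set and subtract renderers;
// otherwise we start from the empty set and add them.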
static GpuPathRenderers collect_gpu_path_renderers_from_flags() {
    if (FLAGS_pr.isEmpty()) {
        return GpuPathRenderers::kDefault;
    }

    GpuPathRenderers gpuPathRenderers = ('~' == FLAGS_pr[0][0])
            ? GpuPathRenderers::kDefault
            : GpuPathRenderers::kNone;

    for (int i = 0; i < FLAGS_pr.count(); ++i) {
        const char* name = FLAGS_pr[i];
        if (name[0] == '~') {
            gpuPathRenderers &= ~get_named_pathrenderers_flags(&name[1]);
        } else {
            gpuPathRenderers |= get_named_pathrenderers_flags(name);
        }
    }
    return gpuPathRenderers;
}

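// Applies the common GPU flags above to ctxOptions. Tools typically call this
// once per context, before handing the options to a context factory.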
91void SetCtxOptionsFromCommonFlags(GrContextOptions* ctxOptions) {
    static std::unique_ptr<SkExecutor> gGpuExecutor = (0 != FLAGS_gpuThreads)
            ? SkExecutor::MakeFIFOThreadPool(FLAGS_gpuThreads)
            : nullptr;

    ctxOptions->fExecutor = gGpuExecutor.get();
    ctxOptions->fDisableCoverageCountingPaths = !FLAGS_cc;
    ctxOptions->fAllowPathMaskCaching = FLAGS_cachePathMasks;
    ctxOptions->fAllPathsVolatile = FLAGS_allPathsVolatile;
    ctxOptions->fSuppressGeometryShaders = !FLAGS_gs;
    ctxOptions->fSuppressTessellationShaders = !FLAGS_ts;
    ctxOptions->fMaxTessellationSegmentsOverride = FLAGS_maxTessellationSegments;
    ctxOptions->fGpuPathRenderers = collect_gpu_path_renderers_from_flags();
    ctxOptions->fInternalMultisampleCount = FLAGS_internalSamples;
    ctxOptions->fDisableDriverCorrectnessWorkarounds = FLAGS_disableDriverCorrectnessWorkarounds;

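    // --reduceOpsTaskSplitting and --dontReduceOpsTaskSplitting are mutually
    // exclusive; if neither is set, the GrContextOptions default stands.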
    if (FLAGS_reduceOpsTaskSplitting) {
        SkASSERT(!FLAGS_dontReduceOpsTaskSplitting);
        ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kYes;
    } else if (FLAGS_dontReduceOpsTaskSplitting) {
        ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kNo;
    }
}
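
// A minimal usage sketch (not part of this file; assumes a GL-backed tool and
// the CommandLineFlags parser used elsewhere in Skia's tools):
//
//     CommandLineFlags::Parse(argc, argv);
//     GrContextOptions grCtxOptions;
//     SetCtxOptionsFromCommonFlags(&grCtxOptions);
//     sk_sp<GrContext> ctx = GrContext::MakeGL(nullptr, grCtxOptions);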