blob: e1fc75dfa73f9485a129b5e3aa0c466b9c4e3ab0 [file] [log] [blame]
/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
Mike Kleinc0bd9f92019-04-23 12:05:21 -05008#include "include/core/SkExecutor.h"
9#include "include/gpu/GrContextOptions.h"
10#include "tools/flags/CommonFlags.h"
Mike Kleinc6142d82019-03-25 10:54:59 -050011
// Number of worker threads handed to the GrContextOptions executor; 0 means no
// executor is created (see SetCtxOptionsFromCommonFlags below). Non-static on
// purpose: other tools reference FLAGS_gpuThreads directly.
DEFINE_int(gpuThreads,
           2,
           "Create this many extra threads to assist with GPU work, "
           "including software path rendering. Defaults to two.");

static DEFINE_bool(cachePathMasks, true,
                   "Allows path mask textures to be cached in GPU configs.");
// Forces the volatile-path code paths for testing; mirrors SkPath::setIsVolatile.
static DEFINE_bool(allPathsVolatile, false,
                   "Causes all GPU paths to be processed as if 'setIsVolatile' had been called.");

// Hardware shader-stage opt-ins/opt-outs (only honored when the hardware supports them).
static DEFINE_bool(gs, true, "Enables support for geometry shaders (if hw allows).");
static DEFINE_bool(hwtess, false, "Enables support for tessellation shaders (if hw allows.).");

// 0 leaves the caps value untouched; any other value overrides it.
static DEFINE_int(maxTessellationSegments, 0,
        "Overrides the max number of tessellation segments supported by the caps.");
27
Mike Kleinc6142d82019-03-25 10:54:59 -050028static DEFINE_string(pr, "",
29 "Set of enabled gpu path renderers. Defined as a list of: "
Robert Phillips84fd1c22021-03-23 13:18:36 -040030 "[~]none [~]dashline [~]ccpr [~]aahairline [~]aaconvex [~]aalinearizing "
Chris Daltoneae5c162020-12-29 10:18:21 -070031 "[~]small [~]tri [~]tess [~]all");
Mike Kleinc6142d82019-03-25 10:54:59 -050032
// Sample count used for internal MSAA draws (e.g. coverage ops that need it).
static DEFINE_int(internalSamples, 4, "Number of samples for internal draws that use MSAA.");

static DEFINE_bool(disableDriverCorrectnessWorkarounds, false,
                   "Disables all GPU driver correctness workarounds");

// Negated flag: when NOT set, task reordering to reduce render passes is enabled.
static DEFINE_bool(dontReduceOpsTaskSplitting, false,
                   "Don't reorder tasks to reduce render passes");

// -1 is a sentinel meaning "keep GrResourceCache::kDefaultMaxSize".
static DEFINE_int(gpuResourceCacheLimit, -1,
                  "Maximum number of bytes to use for budgeted GPU resources. "
                  "Default is -1, which means GrResourceCache::kDefaultMaxSize.");
44
Mike Kleinc6142d82019-03-25 10:54:59 -050045static GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
46 if (!strcmp(name, "none")) {
47 return GpuPathRenderers::kNone;
48 } else if (!strcmp(name, "dashline")) {
49 return GpuPathRenderers::kDashLine;
Mike Kleinc6142d82019-03-25 10:54:59 -050050 } else if (!strcmp(name, "ccpr")) {
51 return GpuPathRenderers::kCoverageCounting;
52 } else if (!strcmp(name, "aahairline")) {
53 return GpuPathRenderers::kAAHairline;
54 } else if (!strcmp(name, "aaconvex")) {
55 return GpuPathRenderers::kAAConvex;
56 } else if (!strcmp(name, "aalinearizing")) {
57 return GpuPathRenderers::kAALinearizing;
58 } else if (!strcmp(name, "small")) {
59 return GpuPathRenderers::kSmall;
Chris Dalton17dc4182020-03-25 16:18:16 -060060 } else if (!strcmp(name, "tri")) {
61 return GpuPathRenderers::kTriangulating;
Chris Daltoneae5c162020-12-29 10:18:21 -070062 } else if (!strcmp(name, "tess")) {
63 return GpuPathRenderers::kTessellation;
Chris Dalton37ae4b02019-12-28 14:51:11 -070064 } else if (!strcmp(name, "default")) {
65 return GpuPathRenderers::kDefault;
Mike Kleinc6142d82019-03-25 10:54:59 -050066 }
John Stiles616da102020-06-12 14:07:41 -040067 SK_ABORT("error: unknown named path renderer \"%s\"\n", name);
Mike Kleinc6142d82019-03-25 10:54:59 -050068}
69
70static GpuPathRenderers collect_gpu_path_renderers_from_flags() {
71 if (FLAGS_pr.isEmpty()) {
Chris Dalton37ae4b02019-12-28 14:51:11 -070072 return GpuPathRenderers::kDefault;
Mike Kleinc6142d82019-03-25 10:54:59 -050073 }
Chris Daltona8fbeba2019-03-30 00:31:23 -060074
Mike Kleinc6142d82019-03-25 10:54:59 -050075 GpuPathRenderers gpuPathRenderers = ('~' == FLAGS_pr[0][0])
Chris Dalton37ae4b02019-12-28 14:51:11 -070076 ? GpuPathRenderers::kDefault
Chris Daltona8fbeba2019-03-30 00:31:23 -060077 : GpuPathRenderers::kNone;
Mike Kleinc6142d82019-03-25 10:54:59 -050078
79 for (int i = 0; i < FLAGS_pr.count(); ++i) {
80 const char* name = FLAGS_pr[i];
81 if (name[0] == '~') {
82 gpuPathRenderers &= ~get_named_pathrenderers_flags(&name[1]);
83 } else {
84 gpuPathRenderers |= get_named_pathrenderers_flags(name);
85 }
86 }
87 return gpuPathRenderers;
88}
89
90void SetCtxOptionsFromCommonFlags(GrContextOptions* ctxOptions) {
91 static std::unique_ptr<SkExecutor> gGpuExecutor = (0 != FLAGS_gpuThreads)
92 ? SkExecutor::MakeFIFOThreadPool(FLAGS_gpuThreads)
93 : nullptr;
94
95 ctxOptions->fExecutor = gGpuExecutor.get();
Mike Kleinc6142d82019-03-25 10:54:59 -050096 ctxOptions->fAllowPathMaskCaching = FLAGS_cachePathMasks;
Chris Daltond1e67162020-09-23 11:55:27 -060097 ctxOptions->fAllPathsVolatile = FLAGS_allPathsVolatile;
Chris Daltoned6e8272020-04-23 11:44:26 -060098 ctxOptions->fSuppressGeometryShaders = !FLAGS_gs;
Chris Dalton4ac9aad2021-02-24 17:41:44 -070099 ctxOptions->fEnableExperimentalHardwareTessellation = FLAGS_hwtess;
Chris Dalton31634282020-09-17 12:16:54 -0600100 ctxOptions->fMaxTessellationSegmentsOverride = FLAGS_maxTessellationSegments;
Mike Kleinc6142d82019-03-25 10:54:59 -0500101 ctxOptions->fGpuPathRenderers = collect_gpu_path_renderers_from_flags();
Chris Dalton52586232019-12-27 13:47:25 -0700102 ctxOptions->fInternalMultisampleCount = FLAGS_internalSamples;
Mike Kleinc6142d82019-03-25 10:54:59 -0500103 ctxOptions->fDisableDriverCorrectnessWorkarounds = FLAGS_disableDriverCorrectnessWorkarounds;
Adlai Holler8e2a8b22021-04-14 11:52:25 -0400104 ctxOptions->fResourceCacheLimitOverride = FLAGS_gpuResourceCacheLimit;
Mike Kleinc6142d82019-03-25 10:54:59 -0500105
Adlai Hollercbe46342021-05-21 17:08:56 +0000106 if (FLAGS_dontReduceOpsTaskSplitting) {
107 ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kNo;
108 } else {
109 ctxOptions->fReduceOpsTaskSplitting = GrContextOptions::Enable::kYes;
110 }
Mike Kleinc6142d82019-03-25 10:54:59 -0500111}