/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SK_COMMON_FLAGS_GPU_H
#define SK_COMMON_FLAGS_GPU_H

#include "GrTypesPriv.h"
#include "SkCommandLineFlags.h"
#include "SkTypes.h"

DECLARE_int32(gpuThreads);
DECLARE_bool(cachePathMasks);
DECLARE_bool(noGS);
DECLARE_string(pr);

Brian Osman195c05b2017-08-30 15:14:04 -040020inline GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
Chris Dalton9acfc6c2018-07-26 12:34:49 -060021 if (!strcmp(name, "none")) {
22 return GpuPathRenderers::kNone;
csmartdalton008b9d82017-02-22 12:00:42 -070023 } else if (!strcmp(name, "dashline")) {
24 return GpuPathRenderers::kDashLine;
25 } else if (!strcmp(name, "nvpr")) {
26 return GpuPathRenderers::kStencilAndCover;
Chris Dalton9acfc6c2018-07-26 12:34:49 -060027 } else if (!strcmp(name, "ccpr")) {
28 return GpuPathRenderers::kCoverageCounting;
29 } else if (!strcmp(name, "aahairline")) {
30 return GpuPathRenderers::kAAHairline;
csmartdalton008b9d82017-02-22 12:00:42 -070031 } else if (!strcmp(name, "aaconvex")) {
32 return GpuPathRenderers::kAAConvex;
33 } else if (!strcmp(name, "aalinearizing")) {
34 return GpuPathRenderers::kAALinearizing;
Jim Van Verth83010462017-03-16 08:45:39 -040035 } else if (!strcmp(name, "small")) {
36 return GpuPathRenderers::kSmall;
csmartdalton008b9d82017-02-22 12:00:42 -070037 } else if (!strcmp(name, "tess")) {
Brian Osman8a9de3d2017-03-01 14:59:05 -050038 return GpuPathRenderers::kTessellating;
Chris Dalton9acfc6c2018-07-26 12:34:49 -060039 } else if (!strcmp(name, "all")) {
40 return GpuPathRenderers::kAll;
csmartdalton008b9d82017-02-22 12:00:42 -070041 }
42 SK_ABORT(SkStringPrintf("error: unknown named path renderer \"%s\"\n", name).c_str());
43 return GpuPathRenderers::kNone;
44}
45
Brian Osman195c05b2017-08-30 15:14:04 -040046inline GpuPathRenderers CollectGpuPathRenderersFromFlags() {
csmartdalton008b9d82017-02-22 12:00:42 -070047 if (FLAGS_pr.isEmpty()) {
Chris Dalton9acfc6c2018-07-26 12:34:49 -060048 return GpuPathRenderers::kAll;
csmartdalton008b9d82017-02-22 12:00:42 -070049 }
Chris Dalton9acfc6c2018-07-26 12:34:49 -060050 GpuPathRenderers gpuPathRenderers = '~' == FLAGS_pr[0][0]
51 ? GpuPathRenderers::kAll : GpuPathRenderers::kNone;
csmartdalton008b9d82017-02-22 12:00:42 -070052 for (int i = 0; i < FLAGS_pr.count(); ++i) {
53 const char* name = FLAGS_pr[i];
54 if (name[0] == '~') {
55 gpuPathRenderers &= ~get_named_pathrenderers_flags(&name[1]);
56 } else {
57 gpuPathRenderers |= get_named_pathrenderers_flags(name);
58 }
59 }
60 return gpuPathRenderers;
61}

/**
 * Helper to set GrContextOptions from common GPU flags.
 */
void SetCtxOptionsFromCommonFlags(struct GrContextOptions*);

#endif