/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SK_COMMON_FLAGS_PATH_RENDERER_H
#define SK_COMMON_FLAGS_PATH_RENDERER_H

#if SK_SUPPORT_GPU

#include "GrContextOptions.h"
#include "SkCommandLineFlags.h"
#include "SkTypes.h"

DECLARE_string(pr);

#define DEFINE_pathrenderer_flag                                                   \
    DEFINE_string(pr, "all",                                                       \
                  "Set of enabled gpu path renderers. Defined as a list of: "      \
                  "[[~]all [~]dashline [~]nvpr [~]msaa [~]aahairline [~]aaconvex " \
                  "[~]aalinearizing [~]small [~]ccpr [~]tess [~]grdefault [~]none]")

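// Illustrative examples of how --pr values are interpreted (semantics follow the
// parsing logic in CollectGpuPathRenderersFromFlags() below):
//   --pr msaa tess       enable only the MSAA and tessellating path renderers
//   --pr ~nvpr           start from "all" and disable NV_path_rendering
//   --pr all ~aaconvex   enable everything, then disable the AA convex renderer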
// Maps a single renderer name from the --pr flag to its GpuPathRenderers bit(s).
// Aborts on an unrecognized name.
inline GrContextOptions::GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
    using GpuPathRenderers = GrContextOptions::GpuPathRenderers;
    if (!strcmp(name, "all")) {
        return GpuPathRenderers::kAll;
    } else if (!strcmp(name, "dashline")) {
        return GpuPathRenderers::kDashLine;
    } else if (!strcmp(name, "nvpr")) {
        return GpuPathRenderers::kStencilAndCover;
    } else if (!strcmp(name, "msaa")) {
        return GpuPathRenderers::kMSAA;
    } else if (!strcmp(name, "aahairline")) {
        return GpuPathRenderers::kAAHairline;
    } else if (!strcmp(name, "aaconvex")) {
        return GpuPathRenderers::kAAConvex;
    } else if (!strcmp(name, "aalinearizing")) {
        return GpuPathRenderers::kAALinearizing;
    } else if (!strcmp(name, "small")) {
        return GpuPathRenderers::kSmall;
    } else if (!strcmp(name, "ccpr")) {
        return GpuPathRenderers::kCoverageCounting;
    } else if (!strcmp(name, "tess")) {
        return GpuPathRenderers::kTessellating;
    } else if (!strcmp(name, "grdefault")) {
        return GpuPathRenderers::kDefault;
    } else if (!strcmp(name, "none")) {
        return GpuPathRenderers::kNone;
    }
    SK_ABORT(SkStringPrintf("error: unknown named path renderer \"%s\"\n", name).c_str());
    return GpuPathRenderers::kNone;
}

// Builds the GpuPathRenderers mask from the --pr flag. If the first entry starts
// with '~', begin with all renderers enabled and subtract each "~name"; otherwise
// begin with none and add each named renderer.
inline GrContextOptions::GpuPathRenderers CollectGpuPathRenderersFromFlags() {
    using GpuPathRenderers = GrContextOptions::GpuPathRenderers;
    if (FLAGS_pr.isEmpty()) {
        return GpuPathRenderers::kAll;
    }
    GpuPathRenderers gpuPathRenderers = '~' == FLAGS_pr[0][0] ?
            GpuPathRenderers::kAll : GpuPathRenderers::kNone;
    for (int i = 0; i < FLAGS_pr.count(); ++i) {
        const char* name = FLAGS_pr[i];
        if (name[0] == '~') {
            gpuPathRenderers &= ~get_named_pathrenderers_flags(&name[1]);
        } else {
            gpuPathRenderers |= get_named_pathrenderers_flags(name);
        }
    }
    return gpuPathRenderers;
}
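
// A minimal usage sketch (an assumption about the calling tool, not part of this
// header's API): after SkCommandLineFlags::Parse has run, the mask is typically
// fed into GrContextOptions::fGpuPathRenderers before creating a GrContext, e.g.
//
//   GrContextOptions options;
//   options.fGpuPathRenderers = CollectGpuPathRenderersFromFlags();
//   // ... pass 'options' to the GrContext / test context factory being used.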

#endif // SK_SUPPORT_GPU

#endif // SK_COMMON_FLAGS_PATH_RENDERER_H