/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SK_COMMON_FLAGS_PATH_RENDERER_H
#define SK_COMMON_FLAGS_PATH_RENDERER_H

#if SK_SUPPORT_GPU

#include "GrContextOptions.h"
#include "SkCommandLineFlags.h"
#include "SkString.h"  // for SkStringPrintf, used in the SK_ABORT message below
#include "SkTypes.h"

#include <string.h>  // for strcmp

DECLARE_string(pr);

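// The --pr flag takes a space-separated list of path renderer names, each optionally
// prefixed with '~' to exclude it. For example (illustrative only):
//
//   --pr msaa aaconvex     enable only the MSAA and AA-convex path renderers
//   --pr ~nvpr ~small      enable everything except NVPR and the small-path renderer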
#define DEFINE_pathrenderer_flag                                                   \
    DEFINE_string(pr, "all",                                                       \
                  "Set of enabled gpu path renderers. Defined as a list of: "      \
                  "[[~]all [~]dashline [~]nvpr [~]msaa [~]aahairline [~]aaconvex " \
                  "[~]aalinearizing [~]small [~]ccpr [~]tess [~]grdefault]")

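// Maps a single --pr token (with any leading '~' already stripped by the caller)
// to its corresponding GpuPathRenderers bit; aborts on an unrecognized name.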
inline GrContextOptions::GpuPathRenderers get_named_pathrenderers_flags(const char* name) {
    using GpuPathRenderers = GrContextOptions::GpuPathRenderers;
    if (!strcmp(name, "all")) {
        return GpuPathRenderers::kAll;
    } else if (!strcmp(name, "dashline")) {
        return GpuPathRenderers::kDashLine;
    } else if (!strcmp(name, "nvpr")) {
        return GpuPathRenderers::kStencilAndCover;
    } else if (!strcmp(name, "msaa")) {
        return GpuPathRenderers::kMSAA;
    } else if (!strcmp(name, "aahairline")) {
        return GpuPathRenderers::kAAHairline;
    } else if (!strcmp(name, "aaconvex")) {
        return GpuPathRenderers::kAAConvex;
    } else if (!strcmp(name, "aalinearizing")) {
        return GpuPathRenderers::kAALinearizing;
    } else if (!strcmp(name, "small")) {
        return GpuPathRenderers::kSmall;
    } else if (!strcmp(name, "ccpr")) {
        return GpuPathRenderers::kCoverageCounting;
    } else if (!strcmp(name, "tess")) {
        return GpuPathRenderers::kTessellating;
    } else if (!strcmp(name, "grdefault")) {
        return GpuPathRenderers::kDefault;
    }
    SK_ABORT(SkStringPrintf("error: unknown named path renderer \"%s\"\n", name).c_str());
    return GpuPathRenderers::kNone;
}

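// Combines the --pr entries left to right: if the first entry begins with '~', the
// baseline is kAll and '~'-prefixed names are masked out; otherwise the baseline is
// kNone and plain names are OR'd in. An empty --pr means all path renderers.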
inline GrContextOptions::GpuPathRenderers CollectGpuPathRenderersFromFlags() {
    using GpuPathRenderers = GrContextOptions::GpuPathRenderers;
    if (FLAGS_pr.isEmpty()) {
        return GpuPathRenderers::kAll;
    }
    GpuPathRenderers gpuPathRenderers = '~' == FLAGS_pr[0][0] ?
                                        GpuPathRenderers::kAll : GpuPathRenderers::kNone;
    for (int i = 0; i < FLAGS_pr.count(); ++i) {
        const char* name = FLAGS_pr[i];
        if (name[0] == '~') {
            gpuPathRenderers &= ~get_named_pathrenderers_flags(&name[1]);
        } else {
            gpuPathRenderers |= get_named_pathrenderers_flags(name);
        }
    }
    return gpuPathRenderers;
}
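
// A minimal usage sketch (illustrative only, not part of this header's API; it
// assumes the tool builds its own GrContextOptions and that the options struct
// exposes a fGpuPathRenderers field to forward the collected mask into):
//
//     GrContextOptions options;
//     options.fGpuPathRenderers = CollectGpuPathRenderersFromFlags();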

#endif // SK_SUPPORT_GPU

#endif