cdalton | 855d83f | 2014-09-18 13:51:53 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2014 Google Inc. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license that can be |
| 5 | * found in the LICENSE file. |
| 6 | */ |
| 7 | |
| 8 | #include "GrPathRange.h" |
| 9 | #include "SkPath.h" |
| 10 | |
cdalton | 855d83f | 2014-09-18 13:51:53 -0700 | [diff] [blame] | 11 | GrPathRange::GrPathRange(GrGpu* gpu, |
kkinnunen | 50b58e6 | 2015-05-18 23:02:07 -0700 | [diff] [blame] | 12 | PathGenerator* pathGenerator) |
kkinnunen | 2e6055b | 2016-04-22 01:48:29 -0700 | [diff] [blame] | 13 | : INHERITED(gpu), |
cdalton | 855d83f | 2014-09-18 13:51:53 -0700 | [diff] [blame] | 14 | fPathGenerator(SkRef(pathGenerator)), |
kkinnunen | 50b58e6 | 2015-05-18 23:02:07 -0700 | [diff] [blame] | 15 | fNumPaths(fPathGenerator->getNumPaths()) { |
cdalton | 855d83f | 2014-09-18 13:51:53 -0700 | [diff] [blame] | 16 | const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup; |
| 17 | fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group. |
| 18 | memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count()); |
| 19 | } |
| 20 | |
| 21 | GrPathRange::GrPathRange(GrGpu* gpu, |
kkinnunen | 50b58e6 | 2015-05-18 23:02:07 -0700 | [diff] [blame] | 22 | int numPaths) |
kkinnunen | 2e6055b | 2016-04-22 01:48:29 -0700 | [diff] [blame] | 23 | : INHERITED(gpu), |
kkinnunen | 50b58e6 | 2015-05-18 23:02:07 -0700 | [diff] [blame] | 24 | fNumPaths(numPaths) { |
cdalton | 855d83f | 2014-09-18 13:51:53 -0700 | [diff] [blame] | 25 | } |
| 26 | |
cdalton | d120105 | 2015-10-05 15:56:34 -0700 | [diff] [blame] | 27 | void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const { |
cdalton | 55b24af | 2014-11-25 11:00:56 -0800 | [diff] [blame] | 28 | switch (indexType) { |
cdalton | 8585dd2 | 2015-10-08 08:04:09 -0700 | [diff] [blame] | 29 | case kU8_PathIndexType: |
| 30 | return this->loadPathsIfNeeded(reinterpret_cast<const uint8_t*>(indices), count); |
| 31 | case kU16_PathIndexType: |
| 32 | return this->loadPathsIfNeeded(reinterpret_cast<const uint16_t*>(indices), count); |
| 33 | case kU32_PathIndexType: |
| 34 | return this->loadPathsIfNeeded(reinterpret_cast<const uint32_t*>(indices), count); |
| 35 | default: |
Ben Wagner | b4aab9a | 2017-08-16 10:53:04 -0400 | [diff] [blame^] | 36 | SK_ABORT("Unknown path index type"); |
cdalton | 55b24af | 2014-11-25 11:00:56 -0800 | [diff] [blame] | 37 | } |
| 38 | } |
| 39 | |
#ifdef SK_DEBUG

/**
 * Debug-only untyped dispatcher: interprets 'indices' as 'count' entries of
 * the width named by 'indexType' and forwards to the matching typed
 * assertPathsLoaded() overload. Aborts on an unrecognized index type.
 */
void GrPathRange::assertPathsLoaded(const void* indices, PathIndexType indexType, int count) const {
    switch (indexType) {
        case kU8_PathIndexType:
            this->assertPathsLoaded(static_cast<const uint8_t*>(indices), count);
            break;
        case kU16_PathIndexType:
            this->assertPathsLoaded(static_cast<const uint16_t*>(indices), count);
            break;
        case kU32_PathIndexType:
            this->assertPathsLoaded(static_cast<const uint32_t*>(indices), count);
            break;
        default:
            SK_ABORT("Unknown path index type");
    }
}

#endif