/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkTraceMemoryDump.h"
#include "src/gpu/GrGpuResourcePriv.h"
#include "src/gpu/gl/GrGLBuffer.h"
#include "src/gpu/gl/GrGLGpu.h"

#define GL_CALL(X) GR_GL_CALL(this->glGpu()->glInterface(), X)
#define GL_CALL_RET(RET, X) GR_GL_CALL_RET(this->glGpu()->glInterface(), RET, X)

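// When GR_GL_CHECK_ALLOC_WITH_GET_ERROR is set, buffer allocations are issued without the
// interface's usual per-call error check so that CHECK_ALLOC_ERROR can read glGetError and
// detect GL_OUT_OF_MEMORY afterwards; otherwise allocations go through the normal checked
// call and CHECK_ALLOC_ERROR reports GR_GL_NO_ERROR.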
#if GR_GL_CHECK_ALLOC_WITH_GET_ERROR
    #define CLEAR_ERROR_BEFORE_ALLOC(iface)   GrGLClearErr(iface)
    #define GL_ALLOC_CALL(iface, call)        GR_GL_CALL_NOERRCHECK(iface, call)
    #define CHECK_ALLOC_ERROR(iface)          GR_GL_GET_ERROR(iface)
#else
    #define CLEAR_ERROR_BEFORE_ALLOC(iface)
    #define GL_ALLOC_CALL(iface, call)        GR_GL_CALL(iface, call)
    #define CHECK_ALLOC_ERROR(iface)          GR_GL_NO_ERROR
#endif

#ifdef SK_DEBUG
#define VALIDATE() this->validate()
#else
#define VALIDATE() do {} while(false)
#endif

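// Creates a GrGLBuffer and allocates its GL storage up front. Returns nullptr if the requested
// use is a CPU<->GPU transfer buffer but the driver exposes no transfer-buffer support, or if
// the GL buffer object could not be created or its storage could not be allocated.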
sk_sp<GrGLBuffer> GrGLBuffer::Make(GrGLGpu* gpu, size_t size, GrGpuBufferType intendedType,
                                   GrAccessPattern accessPattern, const void* data) {
    if (gpu->glCaps().transferBufferType() == GrGLCaps::kNone_TransferBufferType &&
        (GrGpuBufferType::kXferCpuToGpu == intendedType ||
         GrGpuBufferType::kXferGpuToCpu == intendedType)) {
        return nullptr;
    }

    sk_sp<GrGLBuffer> buffer(new GrGLBuffer(gpu, size, intendedType, accessPattern, data));
    if (0 == buffer->bufferID()) {
        return nullptr;
    }
    return buffer;
}

// GL_STREAM_DRAW triggers an optimization in Chromium's GPU process where a client's vertex buffer
// objects are implemented as client-side-arrays on tile-deferred architectures.
#define DYNAMIC_DRAW_PARAM GR_GL_STREAM_DRAW

inline static GrGLenum gr_to_gl_access_pattern(GrGpuBufferType bufferType,
                                               GrAccessPattern accessPattern) {
    auto drawUsage = [](GrAccessPattern pattern) {
        switch (pattern) {
            case kDynamic_GrAccessPattern:
                // TODO: Do we really want to use STREAM_DRAW here on non-Chromium?
                return DYNAMIC_DRAW_PARAM;
            case kStatic_GrAccessPattern:
                return GR_GL_STATIC_DRAW;
            case kStream_GrAccessPattern:
                return GR_GL_STREAM_DRAW;
        }
        SK_ABORT("Unexpected access pattern");
        return GR_GL_STATIC_DRAW;
    };

    auto readUsage = [](GrAccessPattern pattern) {
        switch (pattern) {
            case kDynamic_GrAccessPattern:
                return GR_GL_DYNAMIC_READ;
            case kStatic_GrAccessPattern:
                return GR_GL_STATIC_READ;
            case kStream_GrAccessPattern:
                return GR_GL_STREAM_READ;
        }
        SK_ABORT("Unexpected access pattern");
        return GR_GL_STATIC_READ;
    };

    auto usageType = [&drawUsage, &readUsage](GrGpuBufferType type, GrAccessPattern pattern) {
        switch (type) {
            case GrGpuBufferType::kVertex:
            case GrGpuBufferType::kIndex:
            case GrGpuBufferType::kXferCpuToGpu:
                return drawUsage(pattern);
            case GrGpuBufferType::kXferGpuToCpu:
                return readUsage(pattern);
        }
        SK_ABORT("Unexpected gpu buffer type.");
        return GR_GL_STATIC_DRAW;
    };

    return usageType(bufferType, accessPattern);
}

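// Creates the GL buffer object and allocates its storage immediately. If the driver reports an
// allocation failure, the buffer object is deleted and fBufferID is left at 0 so Make() can
// detect the failure; the scratch key is also removed in that case so the dead resource is never
// handed back out of the resource cache.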
GrGLBuffer::GrGLBuffer(GrGLGpu* gpu, size_t size, GrGpuBufferType intendedType,
                       GrAccessPattern accessPattern, const void* data)
        : INHERITED(gpu, size, intendedType, accessPattern)
        , fIntendedType(intendedType)
        , fBufferID(0)
        , fUsage(gr_to_gl_access_pattern(intendedType, accessPattern))
        , fGLSizeInBytes(0)
        , fHasAttachedToTexture(false) {
    GL_CALL(GenBuffers(1, &fBufferID));
    if (fBufferID) {
        GrGLenum target = gpu->bindBuffer(fIntendedType, this);
        CLEAR_ERROR_BEFORE_ALLOC(gpu->glInterface());
        // make sure driver can allocate memory for this buffer
        GL_ALLOC_CALL(gpu->glInterface(), BufferData(target,
                                                     (GrGLsizeiptr) size,
                                                     data,
                                                     fUsage));
        if (CHECK_ALLOC_ERROR(gpu->glInterface()) != GR_GL_NO_ERROR) {
            GL_CALL(DeleteBuffers(1, &fBufferID));
            fBufferID = 0;
        } else {
            fGLSizeInBytes = size;
        }
    }
    VALIDATE();
    this->registerWithCache(SkBudgeted::kYes);
    if (!fBufferID) {
        this->resourcePriv().removeScratchKey();
    }
}

inline GrGLGpu* GrGLBuffer::glGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrGLGpu*>(this->getGpu());
}

inline const GrGLCaps& GrGLBuffer::glCaps() const {
    return this->glGpu()->glCaps();
}

void GrGLBuffer::onRelease() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);

    if (!this->wasDestroyed()) {
        VALIDATE();
        // make sure we've not been abandoned or already released
        if (fBufferID) {
            GL_CALL(DeleteBuffers(1, &fBufferID));
            fBufferID = 0;
            fGLSizeInBytes = 0;
        }
        fMapPtr = nullptr;
        VALIDATE();
    }

    INHERITED::onRelease();
}

void GrGLBuffer::onAbandon() {
    fBufferID = 0;
    fGLSizeInBytes = 0;
    fMapPtr = nullptr;
    VALIDATE();
    INHERITED::onAbandon();
}

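// Maps the buffer for reading when the intended use is a GPU-to-CPU transfer and for writing
// otherwise; the write paths first let the driver discard (orphan) any previous contents. On
// platforms with no mapping support (kNone), fMapPtr is left as nullptr so the caller can tell
// that mapping is unavailable.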
void GrGLBuffer::onMap() {
    SkASSERT(fBufferID);
    SkASSERT(!this->wasDestroyed());
    VALIDATE();
    SkASSERT(!this->isMapped());

    // TODO: Make this a function parameter.
    bool readOnly = (GrGpuBufferType::kXferGpuToCpu == fIntendedType);

    // Handling dirty context is done in the bindBuffer call
    switch (this->glCaps().mapBufferType()) {
        case GrGLCaps::kNone_MapBufferType:
            return;
        case GrGLCaps::kMapBuffer_MapBufferType: {
            GrGLenum target = this->glGpu()->bindBuffer(fIntendedType, this);
            if (!readOnly) {
                // Let driver know it can discard the old data
                if (this->glCaps().useBufferDataNullHint() || fGLSizeInBytes != this->size()) {
                    GL_CALL(BufferData(target, this->size(), nullptr, fUsage));
                }
            }
            GL_CALL_RET(fMapPtr, MapBuffer(target, readOnly ? GR_GL_READ_ONLY : GR_GL_WRITE_ONLY));
            break;
        }
        case GrGLCaps::kMapBufferRange_MapBufferType: {
            GrGLenum target = this->glGpu()->bindBuffer(fIntendedType, this);
            // Make sure the GL buffer size agrees with fDesc before mapping.
            if (fGLSizeInBytes != this->size()) {
                GL_CALL(BufferData(target, this->size(), nullptr, fUsage));
            }
            GrGLbitfield access;
            if (readOnly) {
                access = GR_GL_MAP_READ_BIT;
            } else {
                access = GR_GL_MAP_WRITE_BIT;
                if (GrGpuBufferType::kXferCpuToGpu != fIntendedType) {
                    // TODO: Make this a function parameter.
                    access |= GR_GL_MAP_INVALIDATE_BUFFER_BIT;
                }
            }
            GL_CALL_RET(fMapPtr, MapBufferRange(target, 0, this->size(), access));
            break;
        }
        case GrGLCaps::kChromium_MapBufferType: {
            GrGLenum target = this->glGpu()->bindBuffer(fIntendedType, this);
            // Make sure the GL buffer size agrees with fDesc before mapping.
            if (fGLSizeInBytes != this->size()) {
                GL_CALL(BufferData(target, this->size(), nullptr, fUsage));
            }
            GL_CALL_RET(fMapPtr, MapBufferSubData(target, 0, this->size(),
                                                  readOnly ? GR_GL_READ_ONLY : GR_GL_WRITE_ONLY));
            break;
        }
    }
    fGLSizeInBytes = this->size();
    VALIDATE();
}

void GrGLBuffer::onUnmap() {
    SkASSERT(fBufferID);
    VALIDATE();
    SkASSERT(this->isMapped());
    if (0 == fBufferID) {
        fMapPtr = nullptr;
        return;
    }
    // bind buffer handles the dirty context
    switch (this->glCaps().mapBufferType()) {
        case GrGLCaps::kNone_MapBufferType:
            SkDEBUGFAIL("Shouldn't get here.");
            return;
        case GrGLCaps::kMapBuffer_MapBufferType: // fall through
        case GrGLCaps::kMapBufferRange_MapBufferType: {
            GrGLenum target = this->glGpu()->bindBuffer(fIntendedType, this);
            GL_CALL(UnmapBuffer(target));
            break;
        }
        case GrGLCaps::kChromium_MapBufferType:
            this->glGpu()->bindBuffer(fIntendedType, this); // TODO: Is this needed?
            GL_CALL(UnmapBufferSubData(fMapPtr));
            break;
    }
    fMapPtr = nullptr;
}

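// Replaces the buffer's contents with srcSizeInBytes bytes from src. Fails if the context was
// destroyed or the source is larger than the buffer. When the driver benefits from the hint,
// the old contents are orphaned via glBufferData(..., nullptr, ...) before glBufferSubData
// uploads the new data, so in-flight draws using the old contents need not be flushed.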
bool GrGLBuffer::onUpdateData(const void* src, size_t srcSizeInBytes) {
    SkASSERT(fBufferID);
    if (this->wasDestroyed()) {
        return false;
    }

    SkASSERT(!this->isMapped());
    VALIDATE();
    if (srcSizeInBytes > this->size()) {
        return false;
    }
    SkASSERT(srcSizeInBytes <= this->size());
    // bindbuffer handles dirty context
    GrGLenum target = this->glGpu()->bindBuffer(fIntendedType, this);

    if (this->glCaps().useBufferDataNullHint()) {
        if (this->size() == srcSizeInBytes) {
            GL_CALL(BufferData(target, (GrGLsizeiptr) srcSizeInBytes, src, fUsage));
        } else {
            // Before we call glBufferSubData we give the driver a hint using
            // glBufferData with nullptr. This makes the old buffer contents
            // inaccessible to future draws. The GPU may still be processing
            // draws that reference the old contents. With this hint it can
            // assign a different allocation for the new contents to avoid
            // flushing the gpu past draws consuming the old contents.
            // TODO I think we actually want to try calling bufferData here
            GL_CALL(BufferData(target, this->size(), nullptr, fUsage));
            GL_CALL(BufferSubData(target, 0, (GrGLsizeiptr) srcSizeInBytes, src));
        }
        fGLSizeInBytes = this->size();
    } else {
        // Note that we're cheating on the size here. Currently no methods
        // allow a partial update that preserves contents of non-updated
        // portions of the buffer (map() does a glBufferData(..size, nullptr..))
        GL_CALL(BufferData(target, srcSizeInBytes, src, fUsage));
        fGLSizeInBytes = srcSizeInBytes;
    }
    VALIDATE();
    return true;
}

void GrGLBuffer::setMemoryBacking(SkTraceMemoryDump* traceMemoryDump,
                                  const SkString& dumpName) const {
    SkString buffer_id;
    buffer_id.appendU32(this->bufferID());
    traceMemoryDump->setMemoryBacking(dumpName.c_str(), "gl_buffer",
                                      buffer_id.c_str());
}

#ifdef SK_DEBUG

void GrGLBuffer::validate() const {
    SkASSERT(0 != fBufferID || 0 == fGLSizeInBytes);
    SkASSERT(nullptr == fMapPtr || fGLSizeInBytes <= this->size());
}

#endif