/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "GrBufferAllocPool.h"
#include "GrTypes.h"
#include "GrVertexBuffer.h"
#include "GrIndexBuffer.h"
#include "GrGpu.h"

#if GR_DEBUG
    #define VALIDATE validate
#else
    static void VALIDATE(bool x = false) {}
#endif

// page size
#define GrBufferAllocPool_MIN_BLOCK_SIZE ((size_t)1 << 12)

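// A GrBufferAllocPool hands out sub-allocations from a chain of vertex or
// index buffers ("blocks"). New data is appended to the most recent block;
// when that block cannot satisfy a request, another block is created. Writes
// land either directly in a locked GPU buffer or in the fCpuData staging
// allocation, which is flushed to the buffer when the pool is unlocked.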
GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu,
                                     BufferType bufferType,
                                     bool frequentResetHint,
                                     size_t blockSize,
                                     int preallocBufferCnt) :
        fBlocks(GrMax(8, 2*preallocBufferCnt)) {

    GrAssert(NULL != gpu);
    fGpu = gpu;
    fGpu->ref();
    fGpuIsReffed = true;

    fBufferType = bufferType;
    fFrequentResetHint = frequentResetHint;
    fBufferPtr = NULL;
    fMinBlockSize = GrMax(GrBufferAllocPool_MIN_BLOCK_SIZE, blockSize);

    fBytesInUse = 0;

    fPreallocBuffersInUse = 0;
    fFirstPreallocBuffer = 0;
    for (int i = 0; i < preallocBufferCnt; ++i) {
        GrGeometryBuffer* buffer = this->createBuffer(fMinBlockSize);
        if (NULL != buffer) {
            *fPreallocBuffers.append() = buffer;
            buffer->ref();
        }
    }
}

GrBufferAllocPool::~GrBufferAllocPool() {
    VALIDATE();
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    fPreallocBuffers.unrefAll();
    releaseGpuRef();
}

void GrBufferAllocPool::releaseGpuRef() {
    if (fGpuIsReffed) {
        fGpu->unref();
        fGpuIsReffed = false;
    }
}

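// Frees all blocks and returns the pool to its initial empty state. The
// preallocated buffers themselves are retained for reuse; only the CPU-side
// staging storage is resized (or released when buffer locking is supported).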
void GrBufferAllocPool::reset() {
    VALIDATE();
    fBytesInUse = 0;
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    if (fPreallocBuffers.count()) {
        // must set this after above loop.
        fFirstPreallocBuffer = (fFirstPreallocBuffer + fPreallocBuffersInUse) %
                               fPreallocBuffers.count();
    }
    fCpuData.reset(fGpu->getCaps().fBufferLockSupport ? 0 : fMinBlockSize);
    GrAssert(0 == fPreallocBuffersInUse);
    VALIDATE();
}

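// Finishes writing to the current block: if the underlying buffer is locked
// it is unlocked; otherwise the bytes staged in fCpuData are flushed to the
// buffer with flushCpuData(). After this call fBufferPtr is NULL.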
void GrBufferAllocPool::unlock() {
    VALIDATE();

    if (NULL != fBufferPtr) {
        BufferBlock& block = fBlocks.back();
        if (block.fBuffer->isLocked()) {
            block.fBuffer->unlock();
        } else {
            size_t flushSize = block.fBuffer->sizeInBytes() - block.fBytesFree;
            flushCpuData(fBlocks.back().fBuffer, flushSize);
        }
        fBufferPtr = NULL;
    }
    VALIDATE();
}

#if GR_DEBUG
void GrBufferAllocPool::validate(bool unusedBlockAllowed) const {
    if (NULL != fBufferPtr) {
        GrAssert(!fBlocks.empty());
        if (fBlocks.back().fBuffer->isLocked()) {
            GrGeometryBuffer* buf = fBlocks.back().fBuffer;
            GrAssert(buf->lockPtr() == fBufferPtr);
        } else {
            GrAssert(fCpuData.get() == fBufferPtr);
        }
    } else {
        GrAssert(fBlocks.empty() || !fBlocks.back().fBuffer->isLocked());
    }
    size_t bytesInUse = 0;
    for (int i = 0; i < fBlocks.count() - 1; ++i) {
        GrAssert(!fBlocks[i].fBuffer->isLocked());
    }
    for (int i = 0; i < fBlocks.count(); ++i) {
        size_t bytes = fBlocks[i].fBuffer->sizeInBytes() - fBlocks[i].fBytesFree;
        bytesInUse += bytes;
        GrAssert(bytes || unusedBlockAllowed);
    }

    GrAssert(bytesInUse == fBytesInUse);
    if (unusedBlockAllowed) {
        GrAssert((fBytesInUse && !fBlocks.empty()) ||
                 (!fBytesInUse && (fBlocks.count() < 2)));
    } else {
        GrAssert((0 == fBytesInUse) == fBlocks.empty());
    }
}
#endif

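// Core sub-allocation routine. If the back block has room (after aligning the
// write offset to 'alignment'), the request is carved out of it and a pointer
// into the currently writable memory (fBufferPtr) is returned. Otherwise a
// new block is created with createBlock() and the allocation starts at offset
// 0 of that block. *buffer and *offset identify where the data will live once
// the pool is unlocked/flushed.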
void* GrBufferAllocPool::makeSpace(size_t size,
                                   size_t alignment,
                                   const GrGeometryBuffer** buffer,
                                   size_t* offset) {
    VALIDATE();

    GrAssert(NULL != buffer);
    GrAssert(NULL != offset);

    if (NULL != fBufferPtr) {
        BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes, alignment);
        if ((size + pad) <= back.fBytesFree) {
            usedBytes += pad;
            *offset = usedBytes;
            *buffer = back.fBuffer;
            back.fBytesFree -= size + pad;
            // count the alignment padding as well so this matches the
            // per-block accounting (sizeInBytes() - fBytesFree) checked in
            // validate().
            fBytesInUse += size + pad;
            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
        }
    }

    // We could honor the space request using updateSubData on the current VB
    // (if there is room). But we don't currently use draw calls to GL that
    // allow the driver to know that previously issued draws won't read from
    // the part of the buffer we update.

    if (!createBlock(size)) {
        return NULL;
    }
    GrAssert(NULL != fBufferPtr);

    *offset = 0;
    BufferBlock& back = fBlocks.back();
    *buffer = back.fBuffer;
    back.fBytesFree -= size;
    fBytesInUse += size;
    VALIDATE();
    return fBufferPtr;
}

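// Returns how many whole items of 'itemSize' bytes still fit in the block
// currently being written (after alignment padding), or in a not-yet-used
// preallocated buffer if no block is open.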
int GrBufferAllocPool::currentBufferItems(size_t itemSize) const {
    VALIDATE();
    if (NULL != fBufferPtr) {
        const BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes, itemSize);
        return (back.fBytesFree - pad) / itemSize;
    } else if (fPreallocBuffersInUse < fPreallocBuffers.count()) {
        return fMinBlockSize / itemSize;
    }
    return 0;
}

int GrBufferAllocPool::preallocatedBuffersRemaining() const {
    return fPreallocBuffers.count() - fPreallocBuffersInUse;
}

int GrBufferAllocPool::preallocatedBufferCount() const {
    return fPreallocBuffers.count();
}

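// Returns the most recently allocated 'bytes' back to the pool. Blocks that
// become completely unused are destroyed; a partially returned block just has
// its free-byte count increased.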
void GrBufferAllocPool::putBack(size_t bytes) {
    VALIDATE();

    while (bytes) {
        // caller shouldn't try to put back more than they've taken
        GrAssert(!fBlocks.empty());
        BufferBlock& block = fBlocks.back();
        size_t bytesUsed = block.fBuffer->sizeInBytes() - block.fBytesFree;
        if (bytes >= bytesUsed) {
            bytes -= bytesUsed;
            fBytesInUse -= bytesUsed;
            // if we locked a VB to satisfy the makeSpace and we're releasing
            // beyond it, then unlock it.
            if (block.fBuffer->isLocked()) {
                block.fBuffer->unlock();
            }
            this->destroyBlock();
        } else {
            block.fBytesFree += bytes;
            fBytesInUse -= bytes;
            bytes = 0;
            break;
        }
    }
    VALIDATE();
}

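// Starts a new block of at least 'requestSize' bytes (never smaller than
// fMinBlockSize). A preallocated buffer is reused when one is available and
// the default size suffices; otherwise a new GPU buffer is created. The new
// block is then either locked for direct writing (when the GPU supports
// buffer locking and the block is large enough) or backed by the fCpuData
// staging allocation.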
bool GrBufferAllocPool::createBlock(size_t requestSize) {

    size_t size = GrMax(requestSize, fMinBlockSize);
    GrAssert(size >= GrBufferAllocPool_MIN_BLOCK_SIZE);

    VALIDATE();

    BufferBlock& block = fBlocks.push_back();

    if (size == fMinBlockSize &&
        fPreallocBuffersInUse < fPreallocBuffers.count()) {

        uint32_t nextBuffer = (fPreallocBuffersInUse + fFirstPreallocBuffer) %
                              fPreallocBuffers.count();
        block.fBuffer = fPreallocBuffers[nextBuffer];
        block.fBuffer->ref();
        ++fPreallocBuffersInUse;
    } else {
        block.fBuffer = this->createBuffer(size);
        if (NULL == block.fBuffer) {
            fBlocks.pop_back();
            return false;
        }
    }

    block.fBytesFree = size;
    if (NULL != fBufferPtr) {
        GrAssert(fBlocks.count() > 1);
        BufferBlock& prev = fBlocks.fromBack(1);
        if (prev.fBuffer->isLocked()) {
            prev.fBuffer->unlock();
        } else {
            flushCpuData(prev.fBuffer,
                         prev.fBuffer->sizeInBytes() - prev.fBytesFree);
        }
        fBufferPtr = NULL;
    }

    GrAssert(NULL == fBufferPtr);

    if (fGpu->getCaps().fBufferLockSupport &&
        size > GR_GEOM_BUFFER_LOCK_THRESHOLD &&
        (!fFrequentResetHint || requestSize > GR_GEOM_BUFFER_LOCK_THRESHOLD)) {
        fBufferPtr = block.fBuffer->lock();
    }

    if (NULL == fBufferPtr) {
        fBufferPtr = fCpuData.reset(size);
    }

    VALIDATE(true);

    return true;
}

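// Pops the most recent block and unrefs its buffer. If that buffer was the
// most recently used preallocated buffer, fPreallocBuffersInUse is rolled
// back so the buffer can be handed out again.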
void GrBufferAllocPool::destroyBlock() {
    GrAssert(!fBlocks.empty());

    BufferBlock& block = fBlocks.back();
    if (fPreallocBuffersInUse > 0) {
        uint32_t prevPreallocBuffer = (fPreallocBuffersInUse +
                                       fFirstPreallocBuffer +
                                       (fPreallocBuffers.count() - 1)) %
                                      fPreallocBuffers.count();
        if (block.fBuffer == fPreallocBuffers[prevPreallocBuffer]) {
            --fPreallocBuffersInUse;
        }
    }
    GrAssert(!block.fBuffer->isLocked());
    block.fBuffer->unref();
    fBlocks.pop_back();
    fBufferPtr = NULL;
}

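// Copies the first 'flushSize' bytes of the CPU-side staging data into the
// given buffer, preferring lock/memcpy/unlock for large flushes and falling
// back to updateData() otherwise.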
void GrBufferAllocPool::flushCpuData(GrGeometryBuffer* buffer,
                                     size_t flushSize) {
    GrAssert(NULL != buffer);
    GrAssert(!buffer->isLocked());
    GrAssert(fCpuData.get() == fBufferPtr);
    GrAssert(flushSize <= buffer->sizeInBytes());

    bool updated = false;
    if (fGpu->getCaps().fBufferLockSupport &&
        flushSize > GR_GEOM_BUFFER_LOCK_THRESHOLD) {
        void* data = buffer->lock();
        if (NULL != data) {
            memcpy(data, fBufferPtr, flushSize);
            buffer->unlock();
            updated = true;
        }
    }
    // only fall back to updateData() when the lock/memcpy path did not
    // succeed; otherwise the data would be uploaded twice.
    if (!updated) {
        buffer->updateData(fBufferPtr, flushSize);
    }
}

GrGeometryBuffer* GrBufferAllocPool::createBuffer(size_t size) {
    if (kIndex_BufferType == fBufferType) {
        return fGpu->createIndexBuffer(size, true);
    } else {
        GrAssert(kVertex_BufferType == fBufferType);
        return fGpu->createVertexBuffer(size, true);
    }
}

////////////////////////////////////////////////////////////////////////////////

GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu,
                                                 bool frequentResetHint,
                                                 size_t bufferSize,
                                                 int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kVertex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

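// Reserves space for 'vertexCount' vertices of the given layout and reports
// which vertex buffer and starting vertex the caller should draw from.
// A minimal usage sketch (the pool, layout, and source data below are
// hypothetical; only the makeSpace() contract comes from this file):
//
//     const GrVertexBuffer* vb;
//     int startVertex;
//     void* verts = pool->makeSpace(layout, 4, &vb, &startVertex);
//     if (NULL != verts) {
//         memcpy(verts, quadVerts, 4 * GrDrawTarget::VertexSize(layout));
//         // issue a draw reading 4 vertices from 'vb' at 'startVertex'
//     }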
void* GrVertexBufferAllocPool::makeSpace(GrVertexLayout layout,
                                         int vertexCount,
                                         const GrVertexBuffer** buffer,
                                         int* startVertex) {

    GrAssert(vertexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startVertex);

    size_t vSize = GrDrawTarget::VertexSize(layout);
    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(vSize * vertexCount,
                                     vSize,
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrVertexBuffer*) geomBuffer;
    GrAssert(0 == offset % vSize);
    *startVertex = offset / vSize;
    return ptr;
}

bool GrVertexBufferAllocPool::appendVertices(GrVertexLayout layout,
                                             int vertexCount,
                                             const void* vertices,
                                             const GrVertexBuffer** buffer,
                                             int* startVertex) {
    void* space = makeSpace(layout, vertexCount, buffer, startVertex);
    if (NULL != space) {
        memcpy(space,
               vertices,
               GrDrawTarget::VertexSize(layout) * vertexCount);
        return true;
    } else {
        return false;
    }
}

int GrVertexBufferAllocPool::preallocatedBufferVertices(GrVertexLayout layout) const {
    return INHERITED::preallocatedBufferSize() /
           GrDrawTarget::VertexSize(layout);
}

int GrVertexBufferAllocPool::currentBufferVertices(GrVertexLayout layout) const {
    return currentBufferItems(GrDrawTarget::VertexSize(layout));
}

////////////////////////////////////////////////////////////////////////////////

GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu,
                                               bool frequentResetHint,
                                               size_t bufferSize,
                                               int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kIndex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}

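// Index-buffer counterpart of the vertex pool above: indices are 16-bit, so
// space is reserved in units of sizeof(uint16_t) and the returned offset is
// converted to a starting index. A minimal usage sketch (the pool and index
// data below are hypothetical):
//
//     const GrIndexBuffer* ib;
//     int startIndex;
//     static const uint16_t kQuadIndices[] = { 0, 1, 2, 0, 2, 3 };
//     if (pool->appendIndices(6, kQuadIndices, &ib, &startIndex)) {
//         // issue an indexed draw reading 6 indices from 'ib' at 'startIndex'
//     }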
void* GrIndexBufferAllocPool::makeSpace(int indexCount,
                                        const GrIndexBuffer** buffer,
                                        int* startIndex) {

    GrAssert(indexCount >= 0);
    GrAssert(NULL != buffer);
    GrAssert(NULL != startIndex);

    size_t offset = 0; // assign to suppress warning
    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
    void* ptr = INHERITED::makeSpace(indexCount * sizeof(uint16_t),
                                     sizeof(uint16_t),
                                     &geomBuffer,
                                     &offset);

    *buffer = (const GrIndexBuffer*) geomBuffer;
    GrAssert(0 == offset % sizeof(uint16_t));
    *startIndex = offset / sizeof(uint16_t);
    return ptr;
}

bool GrIndexBufferAllocPool::appendIndices(int indexCount,
                                           const void* indices,
                                           const GrIndexBuffer** buffer,
                                           int* startIndex) {
    void* space = makeSpace(indexCount, buffer, startIndex);
    if (NULL != space) {
        memcpy(space, indices, sizeof(uint16_t) * indexCount);
        return true;
    } else {
        return false;
    }
}

int GrIndexBufferAllocPool::preallocatedBufferIndices() const {
    return INHERITED::preallocatedBufferSize() / sizeof(uint16_t);
}

int GrIndexBufferAllocPool::currentBufferIndices() const {
    return currentBufferItems(sizeof(uint16_t));
}