/*
    Copyright 2010 Google Inc.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
 */
16
17#include "GrBufferAllocPool.h"
18#include "GrTypes.h"
19#include "GrVertexBuffer.h"
20#include "GrIndexBuffer.h"
21#include "GrGpu.h"
22
23#if GR_DEBUG
24 #define VALIDATE validate
25#else
26 #define VALIDATE()
27#endif
28
29#define GrBufferAllocPool_MIN_BLOCK_SIZE ((size_t)1 << 12)
30
31GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu,
32 BufferType bufferType,
33 bool frequentResetHint,
34 size_t blockSize,
35 int preallocBufferCnt) :
36 fBlocks(GrMax(8, 2*preallocBufferCnt)) {
37 GrAssert(NULL != gpu);
38 fGpu = gpu;
39 fBufferType = bufferType;
40 fFrequentResetHint = frequentResetHint;
41 fGpu->ref();
42 fBufferPtr = NULL;
43 fMinBlockSize = GrMax(GrBufferAllocPool_MIN_BLOCK_SIZE, blockSize);
44
45 fPreallocBuffersInUse = 0;
46 fFirstPreallocBuffer = 0;
47 for (int i = 0; i < preallocBufferCnt; ++i) {
48 GrGeometryBuffer* buffer = this->createBuffer(fMinBlockSize);
49 if (NULL != buffer) {
50 *fPreallocBuffers.append() = buffer;
51 buffer->ref();
52 }
53 }
54}
55
// Tears down the pool: unlocks the active block (its contents are
// discarded, so no flush is needed), releases the preallocated-buffer
// refs, destroys all outstanding blocks, and drops the GPU ref taken in
// the constructor.
GrBufferAllocPool::~GrBufferAllocPool() {
    VALIDATE();
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    // One unref per preallocated buffer (the array's ownership ref).
    fPreallocBuffers.unrefAll();
    // destroyBlock() unrefs each block's buffer as it pops it.
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    fGpu->unref();
}
70
// Returns the pool to its initial state: discards all blocks without
// flushing their contents and shrinks the CPU staging allocation.
void GrBufferAllocPool::reset() {
    VALIDATE();
    // Unlock the active block, if any; its data is being thrown away.
    if (fBlocks.count()) {
        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
        if (buffer->isLocked()) {
            buffer->unlock();
        }
    }
    while (!fBlocks.empty()) {
        destroyBlock();
    }
    if (fPreallocBuffers.count()) {
        // must set this after above loop.
        // NOTE(review): destroyBlock() decrements fPreallocBuffersInUse and
        // the assert below expects it to be zero at this point — if that
        // holds, this rotation is a no-op. Verify the intended ring
        // advancement against callers.
        fFirstPreallocBuffer = (fFirstPreallocBuffer + fPreallocBuffersInUse) %
                               fPreallocBuffers.count();
    }
    // Keep a minimum-block-size staging buffer only when the GPU can't
    // lock buffers directly; otherwise release the CPU-side storage.
    fCpuData.realloc(fGpu->supportsBufferLocking() ? 0 : fMinBlockSize);
    GrAssert(0 == fPreallocBuffersInUse);
    VALIDATE();
}
91
92void GrBufferAllocPool::unlock() {
93 VALIDATE();
94
95 if (NULL != fBufferPtr) {
96 BufferBlock& block = fBlocks.back();
97 if (block.fBuffer->isLocked()) {
98 block.fBuffer->unlock();
99 } else {
100 size_t flushSize = block.fBuffer->size() - block.fBytesFree;
101 flushCpuData(fBlocks.back().fBuffer, flushSize);
102 }
103 fBufferPtr = NULL;
104 }
105 VALIDATE();
106}
107
108#if GR_DEBUG
// Debug-only invariant check:
//  - when fBufferPtr is set it aims either at the locked back buffer's
//    lock pointer or at the CPU staging memory mirroring that buffer;
//  - only the back block's buffer may ever be locked.
void GrBufferAllocPool::validate() const {
    if (NULL != fBufferPtr) {
        GrAssert(!fBlocks.empty());
        if (fBlocks.back().fBuffer->isLocked()) {
            GrGeometryBuffer* buf = fBlocks.back().fBuffer;
            GrAssert(buf->lockPtr() == fBufferPtr);
        } else {
            // CPU staging must exactly mirror the back buffer's size.
            GrAssert(fCpuData.get() == fBufferPtr);
            GrAssert(fCpuData.size() == fBlocks.back().fBuffer->size());
        }
    } else {
        GrAssert(fBlocks.empty() || !fBlocks.back().fBuffer->isLocked());
    }
    // No block other than the last may remain locked.
    for (int i = 0; i < fBlocks.count() - 1; ++i) {
        GrAssert(!fBlocks[i].fBuffer->isLocked());
    }
}
126#endif
127
// Allocates `size` bytes aligned to `alignment` from the pool. On
// success returns a CPU-writable pointer and sets *buffer/*offset to the
// backing GPU buffer and the byte offset within it. Returns NULL on
// failure, leaving *buffer and *offset unmodified.
void* GrBufferAllocPool::makeSpace(size_t size,
                                   size_t alignment,
                                   const GrGeometryBuffer** buffer,
                                   size_t* offset) {
    VALIDATE();

    GrAssert(NULL != buffer);
    GrAssert(NULL != offset);

    if (NULL != fBufferPtr) {
        // Try to satisfy the request from the tail of the active block.
        BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->size() - back.fBytesFree;
        size_t pad = GrSizeAlignUpPad(usedBytes,
                                      alignment);
        if ((size + pad) <= back.fBytesFree) {
            usedBytes += pad;
            *offset = usedBytes;
            *buffer = back.fBuffer;
            back.fBytesFree -= size + pad;
            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
        }
    }

    // Active block (if any) can't hold the request: open a new block.
    // createBlock() flushes/unlocks the previous block and leaves
    // fBufferPtr pointing at writable memory for the new one.
    if (!createBlock(size)) {
        return NULL;
    }
    VALIDATE();
    GrAssert(NULL != fBufferPtr);

    // A fresh block starts at offset 0, which satisfies any alignment.
    *offset = 0;
    BufferBlock& back = fBlocks.back();
    *buffer = back.fBuffer;
    back.fBytesFree -= size;
    return fBufferPtr;
}
163
164int GrBufferAllocPool::currentBufferItems(size_t itemSize) const {
165 VALIDATE();
166 if (NULL != fBufferPtr) {
167 const BufferBlock& back = fBlocks.back();
168 size_t usedBytes = back.fBuffer->size() - back.fBytesFree;
169 size_t pad = GrSizeAlignUpPad(usedBytes, itemSize);
170 return (back.fBytesFree - pad) / itemSize;
171 } else if (fPreallocBuffersInUse < fPreallocBuffers.count()) {
172 return fMinBlockSize / itemSize;
173 }
174 return 0;
175}
176
177int GrBufferAllocPool::preallocatedBuffersRemaining() const {
178 return fPreallocBuffers.count() - fPreallocBuffersInUse;
179}
180
181int GrBufferAllocPool::preallocatedBufferCount() const {
182 return fPreallocBuffers.count();
183}
184
185void GrBufferAllocPool::putBack(size_t bytes) {
186 VALIDATE();
187 if (NULL != fBufferPtr) {
188 BufferBlock& back = fBlocks.back();
189 size_t bytesUsed = back.fBuffer->size() - back.fBytesFree;
190 if (bytes >= bytesUsed) {
191 destroyBlock();
192 bytes -= bytesUsed;
193 } else {
194 back.fBytesFree += bytes;
195 return;
196 }
197 }
198 VALIDATE();
199 GrAssert(NULL == fBufferPtr);
200 // we don't partially roll-back buffers because our VB semantics say locking
201 // a VB discards its previous content.
202 // We could honor it by being sure we use updateSubData and not lock
203 // we will roll-back fully released buffers, though.
204 while (!fBlocks.empty() &&
205 bytes >= fBlocks.back().fBuffer->size()) {
206 bytes -= fBlocks.back().fBuffer->size();
207 destroyBlock();
208 }
209 VALIDATE();
210}
211
// Opens a new block of at least fMinBlockSize (or requestSize, if
// larger), reusing a preallocated buffer when possible. On success,
// fBufferPtr addresses writable memory for the new block: either the
// locked GPU buffer or CPU staging memory flushed later.
bool GrBufferAllocPool::createBlock(size_t requestSize) {

    size_t size = GrMax(requestSize, fMinBlockSize);
    GrAssert(size >= GrBufferAllocPool_MIN_BLOCK_SIZE);

    VALIDATE();

    BufferBlock& block = fBlocks.push_back();

    // Preallocated buffers are exactly fMinBlockSize, so they can only
    // serve requests of that size.
    if (size == fMinBlockSize &&
        fPreallocBuffersInUse < fPreallocBuffers.count()) {

        uint32_t nextBuffer = (fPreallocBuffersInUse + fFirstPreallocBuffer) %
                              fPreallocBuffers.count();
        block.fBuffer = fPreallocBuffers[nextBuffer];
        // Block takes its own ref; dropped in destroyBlock().
        block.fBuffer->ref();
        ++fPreallocBuffersInUse;
    } else {
        // createBuffer() returns an owning ref that the block assumes.
        block.fBuffer = this->createBuffer(size);
        if (NULL == block.fBuffer) {
            fBlocks.pop_back();
            return false;
        }
    }

    block.fBytesFree = size;
    if (NULL != fBufferPtr) {
        // Close out the previous block: unlock it if it was locked,
        // otherwise upload the staged CPU data to its GPU buffer.
        GrAssert(fBlocks.count() > 1);
        BufferBlock& prev = fBlocks.fromBack(1);
        if (prev.fBuffer->isLocked()) {
            prev.fBuffer->unlock();
        } else {
            flushCpuData(prev.fBuffer,
                         prev.fBuffer->size() - prev.fBytesFree);
        }
        fBufferPtr = NULL;
    }

    GrAssert(NULL == fBufferPtr);

    // Lock the GPU buffer directly only when worthwhile: locking is
    // supported, the block is big enough, and (unless the request itself
    // is large) the pool isn't hinted to reset frequently.
    if (fGpu->supportsBufferLocking() &&
        size > GR_GEOM_BUFFER_LOCK_THRESHOLD &&
        (!fFrequentResetHint || requestSize > GR_GEOM_BUFFER_LOCK_THRESHOLD)) {
        fBufferPtr = block.fBuffer->lock();
    }

    // Fall back to CPU staging memory sized to match the block.
    if (NULL == fBufferPtr) {
        fBufferPtr = fCpuData.realloc(size);
    }

    VALIDATE();

    return true;
}
266
// Pops the most recent block, unrefs its buffer, and — if that buffer
// was the most recently handed-out preallocated buffer — returns it to
// the ring. Caller must have already unlocked/flushed the block.
void GrBufferAllocPool::destroyBlock() {
    GrAssert(!fBlocks.empty());

    BufferBlock& block = fBlocks.back();
    if (fPreallocBuffersInUse > 0) {
        // Index of the prealloc buffer handed out last; blocks are
        // destroyed in LIFO order, so only this one can match.
        uint32_t prevPreallocBuffer = (fPreallocBuffersInUse +
                                       fFirstPreallocBuffer +
                                       (fPreallocBuffers.count() - 1)) %
                                      fPreallocBuffers.count();
        if (block.fBuffer == fPreallocBuffers[prevPreallocBuffer]) {
            --fPreallocBuffersInUse;
        }
    }
    GrAssert(!block.fBuffer->isLocked());
    block.fBuffer->unref();
    fBlocks.pop_back();
    // Any CPU-visible pointer referred to the destroyed block.
    fBufferPtr = NULL;
}
285
286void GrBufferAllocPool::flushCpuData(GrGeometryBuffer* buffer,
287 size_t flushSize) {
288 GrAssert(NULL != buffer);
289 GrAssert(!buffer->isLocked());
290 GrAssert(fCpuData.get() == fBufferPtr);
291 GrAssert(fCpuData.size() == buffer->size());
292 GrAssert(flushSize <= buffer->size());
293
294 bool updated = false;
295 if (fGpu->supportsBufferLocking() &&
296 flushSize > GR_GEOM_BUFFER_LOCK_THRESHOLD) {
297 void* data = buffer->lock();
298 if (NULL != data) {
299 memcpy(data, fBufferPtr, flushSize);
300 buffer->unlock();
301 updated = true;
302 }
303 }
304 buffer->updateData(fBufferPtr, flushSize);
305}
306
307GrGeometryBuffer* GrBufferAllocPool::createBuffer(size_t size) {
308 if (kIndex_BufferType == fBufferType) {
309 return fGpu->createIndexBuffer(size, true);
310 } else {
311 GrAssert(kVertex_BufferType == fBufferType);
312 return fGpu->createVertexBuffer(size, true);
313 }
314}
315
316////////////////////////////////////////////////////////////////////////////////
317
// Vertex-flavored pool: all work is delegated to GrBufferAllocPool with
// kVertex_BufferType.
GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu,
                                                 bool frequentResetHint,
                                                 size_t bufferSize,
                                                 int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kVertex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}
328
329void* GrVertexBufferAllocPool::makeSpace(GrVertexLayout layout,
330 int vertexCount,
331 const GrVertexBuffer** buffer,
332 int* startVertex) {
333
334 GrAssert(vertexCount >= 0);
335 GrAssert(NULL != buffer);
336 GrAssert(NULL != startVertex);
337
338 size_t vSize = GrDrawTarget::VertexSize(layout);
339 size_t offset;
340 const GrGeometryBuffer* geomBuffer;
341 void* ptr = INHERITED::makeSpace(vSize * vertexCount,
342 vSize,
343 &geomBuffer,
344 &offset);
345
346 *buffer = (const GrVertexBuffer*) geomBuffer;
347 GrAssert(0 == offset % vSize);
348 *startVertex = offset / vSize;
349 return ptr;
350}
351
352bool GrVertexBufferAllocPool::appendVertices(GrVertexLayout layout,
353 int vertexCount,
354 const void* vertices,
355 const GrVertexBuffer** buffer,
356 int* startVertex) {
357 void* space = makeSpace(layout, vertexCount, buffer, startVertex);
358 if (NULL != space) {
359 memcpy(space,
360 vertices,
361 GrDrawTarget::VertexSize(layout) * vertexCount);
362 return true;
363 } else {
364 return false;
365 }
366}
367
368int GrVertexBufferAllocPool::preallocatedBufferVertices(GrVertexLayout layout) const {
369 return INHERITED::preallocatedBufferSize() /
370 GrDrawTarget::VertexSize(layout);
371}
372
373int GrVertexBufferAllocPool::currentBufferVertices(GrVertexLayout layout) const {
374 return currentBufferItems(GrDrawTarget::VertexSize(layout));
375}
376
377////////////////////////////////////////////////////////////////////////////////
378
// Index-flavored pool: all work is delegated to GrBufferAllocPool with
// kIndex_BufferType.
GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu,
                                               bool frequentResetHint,
                                               size_t bufferSize,
                                               int preallocBufferCnt)
: GrBufferAllocPool(gpu,
                    kIndex_BufferType,
                    frequentResetHint,
                    bufferSize,
                    preallocBufferCnt) {
}
389
390void* GrIndexBufferAllocPool::makeSpace(int indexCount,
391 const GrIndexBuffer** buffer,
392 int* startIndex) {
393
394 GrAssert(indexCount >= 0);
395 GrAssert(NULL != buffer);
396 GrAssert(NULL != startIndex);
397
398 size_t offset;
399 const GrGeometryBuffer* geomBuffer;
400 void* ptr = INHERITED::makeSpace(indexCount * sizeof(uint16_t),
401 sizeof(uint16_t),
402 &geomBuffer,
403 &offset);
404
405 *buffer = (const GrIndexBuffer*) geomBuffer;
406 GrAssert(0 == offset % sizeof(uint16_t));
407 *startIndex = offset / sizeof(uint16_t);
408 return ptr;
409}
410
411bool GrIndexBufferAllocPool::appendIndices(int indexCount,
412 const void* indices,
413 const GrIndexBuffer** buffer,
414 int* startIndex) {
415 void* space = makeSpace(indexCount, buffer, startIndex);
416 if (NULL != space) {
417 memcpy(space, indices, sizeof(uint16_t) * indexCount);
418 return true;
419 } else {
420 return false;
421 }
422}
423
424int GrIndexBufferAllocPool::preallocatedBufferIndices() const {
425 return INHERITED::preallocatedBufferSize() / sizeof(uint16_t);
426}
427
428int GrIndexBufferAllocPool::currentBufferIndices() const {
429 return currentBufferItems(sizeof(uint16_t));
430}