/*
 * Copyright (C) 2008 The Android Open Source Project
 * Copyright (c) 2010-2013, The Linux Foundation. All rights reserved.
 *
 * Not a Contribution, Apache license notifications and license are retained
 * for attribution purposes only.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <cutils/log.h>
#include <sys/resource.h>
#include <sys/prctl.h>

#include <stdint.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#include <fcntl.h>

#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/mman.h>

#include <linux/msm_kgsl.h>

#include <EGL/eglplatform.h>
#include <cutils/native_handle.h>
#include <cutils/ashmem.h>
#include <linux/ashmem.h>
#include <gralloc_priv.h>

#include <copybit.h>
#include <alloc_controller.h>
#include <memalloc.h>

#include "c2d2.h"
#include "software_converter.h"

#include <dlfcn.h>

using gralloc::IMemAlloc;
using gralloc::IonController;
using gralloc::alloc_data;

C2D_STATUS (*LINK_c2dCreateSurface)( uint32 *surface_id,
                                     uint32 surface_bits,
                                     C2D_SURFACE_TYPE surface_type,
                                     void *surface_definition );

C2D_STATUS (*LINK_c2dUpdateSurface)( uint32 surface_id,
                                     uint32 surface_bits,
                                     C2D_SURFACE_TYPE surface_type,
                                     void *surface_definition );

C2D_STATUS (*LINK_c2dReadSurface)( uint32 surface_id,
                                   C2D_SURFACE_TYPE surface_type,
                                   void *surface_definition,
                                   int32 x, int32 y );

C2D_STATUS (*LINK_c2dDraw)( uint32 target_id,
                            uint32 target_config, C2D_RECT *target_scissor,
                            uint32 target_mask_id, uint32 target_color_key,
                            C2D_OBJECT *objects_list, uint32 num_objects );

C2D_STATUS (*LINK_c2dFinish)( uint32 target_id);

C2D_STATUS (*LINK_c2dFlush)( uint32 target_id, c2d_ts_handle *timestamp);

C2D_STATUS (*LINK_c2dWaitTimestamp)( c2d_ts_handle timestamp );

C2D_STATUS (*LINK_c2dDestroySurface)( uint32 surface_id );

C2D_STATUS (*LINK_c2dMapAddr) ( int mem_fd, void * hostptr, uint32 len,
                                uint32 offset, uint32 flags, void ** gpuaddr);

C2D_STATUS (*LINK_c2dUnMapAddr) ( void * gpuaddr);

C2D_STATUS (*LINK_c2dGetDriverCapabilities) ( C2D_DRIVER_INFO * driver_info);

/* create a fence fd for the timestamp */
C2D_STATUS (*LINK_c2dCreateFenceFD) ( uint32 target_id, c2d_ts_handle timestamp,
                                      int32 *fd);

C2D_STATUS (*LINK_c2dFillSurface) ( uint32 surface_id, uint32 fill_color,
                                    C2D_RECT * fill_rect);

/******************************************************************************/

#if defined(COPYBIT_Z180)
#define MAX_SCALE_FACTOR (4096)
#define MAX_DIMENSION (4096)
#else
#error "Unsupported HW version"
#endif

// The following defines can be changed as required i.e. as we encounter
// complex use cases.
#define MAX_RGB_SURFACES 32        // Max. RGB layers currently supported per draw
#define MAX_YUV_2_PLANE_SURFACES 4 // Max. 2-plane YUV layers currently supported per draw
#define MAX_YUV_3_PLANE_SURFACES 1 // Max. 3-plane YUV layers currently supported per draw
// +1 for the destination surface. We cannot have multiple destination surfaces.
#define MAX_SURFACES (MAX_RGB_SURFACES + MAX_YUV_2_PLANE_SURFACES + MAX_YUV_3_PLANE_SURFACES + 1)
#define NUM_SURFACE_TYPES 3      // RGB_SURFACE + YUV_SURFACE_2_PLANES + YUV_SURFACE_3_PLANES
#define MAX_BLIT_OBJECT_COUNT 50 // Max. blit objects that can be passed per draw

enum {
    RGB_SURFACE,
    YUV_SURFACE_2_PLANES,
    YUV_SURFACE_3_PLANES
};

enum eConversionType {
    CONVERT_TO_ANDROID_FORMAT,
    CONVERT_TO_C2D_FORMAT
};

enum eC2DFlags {
    FLAGS_PREMULTIPLIED_ALPHA = 1<<0,
    FLAGS_YUV_DESTINATION     = 1<<1,
    FLAGS_TEMP_SRC_DST        = 1<<2
};

static gralloc::IAllocController* sAlloc = 0;
/******************************************************************************/

/** State information for each device instance */
struct copybit_context_t {
    struct copybit_device_t device;
    // Templates for the various source surfaces. These templates are created
    // to avoid the expensive create/destroy C2D Surfaces
    C2D_OBJECT_STR blit_rgb_object[MAX_RGB_SURFACES];
    C2D_OBJECT_STR blit_yuv_2_plane_object[MAX_YUV_2_PLANE_SURFACES];
    C2D_OBJECT_STR blit_yuv_3_plane_object[MAX_YUV_3_PLANE_SURFACES];
    C2D_OBJECT_STR blit_list[MAX_BLIT_OBJECT_COUNT]; // Z-ordered list of blit objects
    C2D_DRIVER_INFO c2d_driver_info;
    void *libc2d2;
    alloc_data temp_src_buffer;
    alloc_data temp_dst_buffer;
    unsigned int dst[NUM_SURFACE_TYPES];        // dst surfaces
    unsigned int mapped_gpu_addr[MAX_SURFACES]; // GPU addresses mapped inside copybit
    int blit_rgb_count;          // Total RGB surfaces being blit
    int blit_yuv_2_plane_count;  // Total 2-plane YUV surfaces being blit
    int blit_yuv_3_plane_count;  // Total 3-plane YUV surfaces being blit
    int blit_count;              // Total blit objects.
    unsigned int trg_transform;  /* target transform */
    int fb_width;
    int fb_height;
    int src_global_alpha;
    int config_mask;
    int dst_surface_type;
    bool is_premultiplied_alpha;
    void* time_stamp;
    bool dst_surface_mapped; // Set when dst surface is mapped to GPU addr
    void* dst_surface_base;  // Stores the dst surface addr

    // used for signaling the wait thread
    bool wait_timestamp;
    pthread_t wait_thread_id;
    bool stop_thread;
    pthread_mutex_t wait_cleanup_lock;
    pthread_cond_t wait_cleanup_cond;

};

struct bufferInfo {
    int width;
    int height;
    int format;
};

struct yuvPlaneInfo {
    int yStride; //luma stride
    int plane1_stride;
    int plane2_stride;
    int plane1_offset;
    int plane2_offset;
};

/**
 * Common hardware methods
 */

static int open_copybit(const struct hw_module_t* module, const char* name,
                        struct hw_device_t** device);

static struct hw_module_methods_t copybit_module_methods = {
    open: open_copybit
};

/*
 * The COPYBIT Module
 */
struct copybit_module_t HAL_MODULE_INFO_SYM = {
    common: {
        tag: HARDWARE_MODULE_TAG,
        version_major: 1,
        version_minor: 0,
        id: COPYBIT_HARDWARE_MODULE_ID,
        name: "QCT COPYBIT C2D 2.0 Module",
        author: "Qualcomm",
        methods: &copybit_module_methods
    }
};

/* thread function which waits on the timeStamp and cleans up the surfaces */
static void* c2d_wait_loop(void* ptr) {
    copybit_context_t* ctx = (copybit_context_t*)(ptr);
    char thread_name[64] = "copybitWaitThr";
    prctl(PR_SET_NAME, (unsigned long) &thread_name, 0, 0, 0);
    setpriority(PRIO_PROCESS, 0, HAL_PRIORITY_URGENT_DISPLAY);

    while(ctx->stop_thread == false) {
        pthread_mutex_lock(&ctx->wait_cleanup_lock);
        while(ctx->wait_timestamp == false && !ctx->stop_thread) {
            pthread_cond_wait(&(ctx->wait_cleanup_cond),
                              &(ctx->wait_cleanup_lock));
        }
        if(ctx->wait_timestamp) {
            if(LINK_c2dWaitTimestamp(ctx->time_stamp)) {
                ALOGE("%s: LINK_c2dWaitTimeStamp ERROR!!", __FUNCTION__);
            }
            ctx->wait_timestamp = false;
            // Unmap any mapped addresses.
            for (int i = 0; i < MAX_SURFACES; i++) {
                if (ctx->mapped_gpu_addr[i]) {
                    LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[i]);
                    ctx->mapped_gpu_addr[i] = 0;
                }
            }
            // Reset the counts after the draw.
            ctx->blit_rgb_count = 0;
            ctx->blit_yuv_2_plane_count = 0;
            ctx->blit_yuv_3_plane_count = 0;
            ctx->blit_count = 0;
            ctx->dst_surface_mapped = false;
            ctx->dst_surface_base = 0;
        }
        pthread_mutex_unlock(&ctx->wait_cleanup_lock);
        if(ctx->stop_thread)
            break;
    }
    pthread_exit(NULL);
    return NULL;
}
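
/*
 * Note on the flush/wait handshake (a summary of the code above and of
 * flush_get_fence_copybit() below): the caller thread queues a draw, calls
 * LINK_c2dFlush() to obtain a timestamp, sets wait_timestamp and signals
 * wait_cleanup_cond while holding wait_cleanup_lock. This worker thread then
 * waits on that timestamp, unmaps every GPU address recorded in
 * mapped_gpu_addr[] and resets the per-draw surface counts, so the next
 * composition starts from a clean slate without blocking the caller.
 */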


/* convert COPYBIT_FORMAT to C2D format */
static int get_format(int format) {
    switch (format) {
        case HAL_PIXEL_FORMAT_RGB_565:        return C2D_COLOR_FORMAT_565_RGB;
        case HAL_PIXEL_FORMAT_RGBX_8888:      return C2D_COLOR_FORMAT_8888_ARGB |
                                                     C2D_FORMAT_SWAP_RB |
                                                     C2D_FORMAT_DISABLE_ALPHA;
        case HAL_PIXEL_FORMAT_RGBA_8888:      return C2D_COLOR_FORMAT_8888_ARGB |
                                                     C2D_FORMAT_SWAP_RB;
        case HAL_PIXEL_FORMAT_BGRA_8888:      return C2D_COLOR_FORMAT_8888_ARGB;
        case HAL_PIXEL_FORMAT_RGBA_5551:      return C2D_COLOR_FORMAT_5551_RGBA;
        case HAL_PIXEL_FORMAT_RGBA_4444:      return C2D_COLOR_FORMAT_4444_RGBA;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:   return C2D_COLOR_FORMAT_420_NV12;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:return C2D_COLOR_FORMAT_420_NV12;
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:   return C2D_COLOR_FORMAT_420_NV21;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: return C2D_COLOR_FORMAT_420_NV12 |
                                                         C2D_FORMAT_MACROTILED;
        default: ALOGE("%s: invalid format (0x%x)",
                       __FUNCTION__, format);
                 return -EINVAL;
    }
    return -EINVAL;
}

/* Get the C2D formats needed for conversion to YUV */
static int get_c2d_format_for_yuv_destination(int halFormat) {
    switch (halFormat) {
        // We do not swap the RB when the target is YUV
        case HAL_PIXEL_FORMAT_RGBX_8888:      return C2D_COLOR_FORMAT_8888_ARGB |
                                                     C2D_FORMAT_DISABLE_ALPHA;
        case HAL_PIXEL_FORMAT_RGBA_8888:      return C2D_COLOR_FORMAT_8888_ARGB;
        // The U and V need to be interchanged when the target is YUV
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:   return C2D_COLOR_FORMAT_420_NV21;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:return C2D_COLOR_FORMAT_420_NV21;
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:   return C2D_COLOR_FORMAT_420_NV12;
        default: return get_format(halFormat);
    }
    return -EINVAL;
}

/* ------------------------------------------------------------------- *//*!
 * \internal
 * \brief Get the bpp for a particular color format
 * \param color format
 * \return bits per pixel
 *//* ------------------------------------------------------------------- */
int c2diGetBpp(int32 colorformat)
{

    int c2dBpp = 0;

    switch(colorformat&0xFF)
    {
        case C2D_COLOR_FORMAT_4444_RGBA:
        case C2D_COLOR_FORMAT_4444_ARGB:
        case C2D_COLOR_FORMAT_1555_ARGB:
        case C2D_COLOR_FORMAT_565_RGB:
        case C2D_COLOR_FORMAT_5551_RGBA:
            c2dBpp = 16;
            break;
        case C2D_COLOR_FORMAT_8888_RGBA:
        case C2D_COLOR_FORMAT_8888_ARGB:
            c2dBpp = 32;
            break;
        case C2D_COLOR_FORMAT_8_L:
        case C2D_COLOR_FORMAT_8_A:
            c2dBpp = 8;
            break;
        case C2D_COLOR_FORMAT_4_A:
            c2dBpp = 4;
            break;
        case C2D_COLOR_FORMAT_1:
            c2dBpp = 1;
            break;
        default:
            ALOGE("%s ERROR", __func__);
            break;
    }
    return c2dBpp;
}

static uint32 c2d_get_gpuaddr(copybit_context_t* ctx,
                              struct private_handle_t *handle, int &mapped_idx)
{
    uint32 memtype, *gpuaddr = 0;
    C2D_STATUS rc;
    int freeindex = 0;
    bool mapaddr = false;

    if(!handle)
        return 0;

    if (handle->flags & (private_handle_t::PRIV_FLAGS_USES_PMEM |
                         private_handle_t::PRIV_FLAGS_USES_PMEM_ADSP))
        memtype = KGSL_USER_MEM_TYPE_PMEM;
    else if (handle->flags & private_handle_t::PRIV_FLAGS_USES_ASHMEM)
        memtype = KGSL_USER_MEM_TYPE_ASHMEM;
    else if (handle->flags & private_handle_t::PRIV_FLAGS_USES_ION)
        memtype = KGSL_USER_MEM_TYPE_ION;
    else {
        ALOGE("Invalid handle flags: 0x%x", handle->flags);
        return 0;
    }

    // Check for a freeindex in the mapped_gpu_addr list
    for (freeindex = 0; freeindex < MAX_SURFACES; freeindex++) {
        if (ctx->mapped_gpu_addr[freeindex] == 0) {
            // free index is available
            // map GPU addr and use this as mapped_idx
            mapaddr = true;
            break;
        }
    }

    if(mapaddr) {
        rc = LINK_c2dMapAddr(handle->fd, (void*)handle->base, handle->size,
                             handle->offset, memtype, (void**)&gpuaddr);

        if (rc == C2D_STATUS_OK) {
            // We have mapped the GPU address inside copybit. We need to unmap
            // this address after the blit. Store this address
            ctx->mapped_gpu_addr[freeindex] = (uint32) gpuaddr;
            mapped_idx = freeindex;
        }
    }
    return (uint32) gpuaddr;
}

static void unmap_gpuaddr(copybit_context_t* ctx, int mapped_idx)
{
    if (!ctx || (mapped_idx == -1))
        return;

    if (ctx->mapped_gpu_addr[mapped_idx]) {
        LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[mapped_idx]);
        ctx->mapped_gpu_addr[mapped_idx] = 0;
    }
}
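
/*
 * Mapping lifetime: c2d_get_gpuaddr() records every address it maps in
 * ctx->mapped_gpu_addr[] and hands the slot index back through mapped_idx.
 * The slot is released either immediately via unmap_gpuaddr() on an error
 * path, or in bulk once the queued draw has completed (see finish_copybit()
 * and the wait thread above), so a mapping never outlives the draw that
 * uses it.
 */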

static int is_supported_rgb_format(int format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_RGBA_5551:
        case HAL_PIXEL_FORMAT_RGBA_4444: {
            return COPYBIT_SUCCESS;
        }
        default:
            return COPYBIT_FAILURE;
    }
}

static int get_num_planes(int format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: {
            return 2;
        }
        case HAL_PIXEL_FORMAT_YV12: {
            return 3;
        }
        default:
            return COPYBIT_FAILURE;
    }
}

static int is_supported_yuv_format(int format)
{
    switch(format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: {
            return COPYBIT_SUCCESS;
        }
        default:
            return COPYBIT_FAILURE;
    }
}

static int is_valid_destination_format(int format)
{
    if (format == HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED) {
        // C2D does not support NV12Tile as a destination format.
        return COPYBIT_FAILURE;
    }
    return COPYBIT_SUCCESS;
}

static int calculate_yuv_offset_and_stride(const bufferInfo& info,
                                           yuvPlaneInfo& yuvInfo)
{
    int width = info.width;
    int height = info.height;
    int format = info.format;

    int aligned_height = 0;
    int aligned_width = 0, size = 0;

    switch (format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: {
            /* NV12 Tile buffers have their luma height aligned to 32bytes and width
             * aligned to 128 bytes. The chroma offset starts at an 8K boundary
             */
            aligned_height = ALIGN(height, 32);
            aligned_width  = ALIGN(width, 128);
            size = aligned_width * aligned_height;
            yuvInfo.plane1_offset = ALIGN(size,8192);
            yuvInfo.yStride = aligned_width;
            yuvInfo.plane1_stride = aligned_width;
            break;
        }
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP: {
            aligned_width = ALIGN(width, 32);
            yuvInfo.yStride = aligned_width;
            yuvInfo.plane1_stride = aligned_width;
            if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
                // The encoder requires a 2K aligned chroma offset
                yuvInfo.plane1_offset = ALIGN(aligned_width * height, 2048);
            } else
                yuvInfo.plane1_offset = aligned_width * height;

            break;
        }
        default: {
            return COPYBIT_FAILURE;
        }
    }
    return COPYBIT_SUCCESS;
}
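
/*
 * Illustrative example of the arithmetic above (not exercised by the code):
 * a 640x480 HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED buffer gives
 *   aligned_width  = ALIGN(640, 128) = 640
 *   aligned_height = ALIGN(480, 32)  = 480
 *   luma size      = 640 * 480       = 307200 bytes
 *   plane1_offset  = ALIGN(307200, 8192) = 311296
 * so the chroma plane starts 311296 bytes into the buffer with a stride of 640.
 */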

/** create C2D surface from copybit image */
static int set_image(copybit_context_t* ctx, uint32 surfaceId,
                     const struct copybit_image_t *rhs,
                     const eC2DFlags flags, int &mapped_idx)
{
    struct private_handle_t* handle = (struct private_handle_t*)rhs->handle;
    C2D_SURFACE_TYPE surfaceType;
    int status = COPYBIT_SUCCESS;
    uint32 gpuaddr = 0;
    int c2d_format;
    mapped_idx = -1;

    if (flags & FLAGS_YUV_DESTINATION) {
        c2d_format = get_c2d_format_for_yuv_destination(rhs->format);
    } else {
        c2d_format = get_format(rhs->format);
    }

    if(c2d_format == -EINVAL) {
        ALOGE("%s: invalid format", __FUNCTION__);
        return -EINVAL;
    }

    if(handle == NULL) {
        ALOGE("%s: invalid handle", __func__);
        return -EINVAL;
    }

    if (handle->gpuaddr == 0) {
        gpuaddr = c2d_get_gpuaddr(ctx, handle, mapped_idx);
        if(!gpuaddr) {
            ALOGE("%s: c2d_get_gpuaddr failed", __FUNCTION__);
            return COPYBIT_FAILURE;
        }
    } else {
        gpuaddr = handle->gpuaddr;
    }

    /* create C2D surface */
    if(is_supported_rgb_format(rhs->format) == COPYBIT_SUCCESS) {
        /* RGB */
        C2D_RGB_SURFACE_DEF surfaceDef;

        surfaceType = (C2D_SURFACE_TYPE) (C2D_SURFACE_RGB_HOST | C2D_SURFACE_WITH_PHYS);

        surfaceDef.phys = (void*) gpuaddr;
        surfaceDef.buffer = (void*) (handle->base);

        surfaceDef.format = c2d_format |
            ((flags & FLAGS_PREMULTIPLIED_ALPHA) ? C2D_FORMAT_PREMULTIPLIED : 0);
        surfaceDef.width = rhs->w;
        surfaceDef.height = rhs->h;
        int aligned_width = ALIGN(surfaceDef.width,32);
        surfaceDef.stride = (aligned_width * c2diGetBpp(surfaceDef.format))>>3;

        if(LINK_c2dUpdateSurface( surfaceId,C2D_TARGET | C2D_SOURCE, surfaceType,
                                  &surfaceDef)) {
            ALOGE("%s: RGB Surface c2dUpdateSurface ERROR", __FUNCTION__);
            unmap_gpuaddr(ctx, mapped_idx);
            status = COPYBIT_FAILURE;
        }
    } else if (is_supported_yuv_format(rhs->format) == COPYBIT_SUCCESS) {
        C2D_YUV_SURFACE_DEF surfaceDef;
        memset(&surfaceDef, 0, sizeof(surfaceDef));
        surfaceType = (C2D_SURFACE_TYPE)(C2D_SURFACE_YUV_HOST | C2D_SURFACE_WITH_PHYS);
        surfaceDef.format = c2d_format;

        bufferInfo info;
        info.width = rhs->w;
        info.height = rhs->h;
        info.format = rhs->format;

        yuvPlaneInfo yuvInfo = {0};
        status = calculate_yuv_offset_and_stride(info, yuvInfo);
        if(status != COPYBIT_SUCCESS) {
            ALOGE("%s: calculate_yuv_offset_and_stride error", __FUNCTION__);
            unmap_gpuaddr(ctx, mapped_idx);
        }

        surfaceDef.width = rhs->w;
        surfaceDef.height = rhs->h;
        surfaceDef.plane0 = (void*) (handle->base);
        surfaceDef.phys0 = (void*) (gpuaddr);
        surfaceDef.stride0 = yuvInfo.yStride;

        surfaceDef.plane1 = (void*) (handle->base + yuvInfo.plane1_offset);
        surfaceDef.phys1 = (void*) (gpuaddr + yuvInfo.plane1_offset);
        surfaceDef.stride1 = yuvInfo.plane1_stride;
        if (3 == get_num_planes(rhs->format)) {
            surfaceDef.plane2 = (void*) (handle->base + yuvInfo.plane2_offset);
            surfaceDef.phys2 = (void*) (gpuaddr + yuvInfo.plane2_offset);
            surfaceDef.stride2 = yuvInfo.plane2_stride;
        }

        if(LINK_c2dUpdateSurface( surfaceId,C2D_TARGET | C2D_SOURCE, surfaceType,
                                  &surfaceDef)) {
            ALOGE("%s: YUV Surface c2dUpdateSurface ERROR", __FUNCTION__);
            unmap_gpuaddr(ctx, mapped_idx);
            status = COPYBIT_FAILURE;
        }
    } else {
        ALOGE("%s: invalid format 0x%x", __FUNCTION__, rhs->format);
        unmap_gpuaddr(ctx, mapped_idx);
        status = COPYBIT_FAILURE;
    }

    return status;
}

/** copy the bits */
static int msm_copybit(struct copybit_context_t *ctx, unsigned int target)
{
    if (ctx->blit_count == 0) {
        return COPYBIT_SUCCESS;
    }

    for (int i = 0; i < ctx->blit_count; i++)
    {
        ctx->blit_list[i].next = &(ctx->blit_list[i+1]);
    }
    ctx->blit_list[ctx->blit_count-1].next = NULL;
    uint32_t target_transform = ctx->trg_transform;
    if (ctx->c2d_driver_info.capabilities_mask &
        C2D_DRIVER_SUPPORTS_OVERRIDE_TARGET_ROTATE_OP) {
        // For A3xx - set 0x0 as the transform is set in the config_mask
        target_transform = 0x0;
    }
    if(LINK_c2dDraw(target, target_transform, 0x0, 0, 0, ctx->blit_list,
                    ctx->blit_count)) {
        ALOGE("%s: LINK_c2dDraw ERROR", __FUNCTION__);
        return COPYBIT_FAILURE;
    }
    return COPYBIT_SUCCESS;
}
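
/*
 * The blit objects batched in ctx->blit_list[] are chained into a singly
 * linked list just before submission, so the whole Z-ordered batch is handed
 * to the C2D driver with a single LINK_c2dDraw() call instead of one call
 * per layer.
 */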

static int flush_get_fence_copybit (struct copybit_device_t *dev, int* fd)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    int status = COPYBIT_FAILURE;
    if (!ctx)
        return COPYBIT_FAILURE;
    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    status = msm_copybit(ctx, ctx->dst[ctx->dst_surface_type]);

    if(LINK_c2dFlush(ctx->dst[ctx->dst_surface_type], &ctx->time_stamp)) {
        ALOGE("%s: LINK_c2dFlush ERROR", __FUNCTION__);
        // unlock the mutex and return failure
        pthread_mutex_unlock(&ctx->wait_cleanup_lock);
        return COPYBIT_FAILURE;
    }
    if(LINK_c2dCreateFenceFD(ctx->dst[ctx->dst_surface_type], ctx->time_stamp,
                             fd)) {
        ALOGE("%s: LINK_c2dCreateFenceFD ERROR", __FUNCTION__);
        status = COPYBIT_FAILURE;
    }
    if(status == COPYBIT_SUCCESS) {
        //signal the wait_thread
        ctx->wait_timestamp = true;
        pthread_cond_signal(&ctx->wait_cleanup_cond);
    }
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    return status;
}
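
/*
 * flush_get_fence_copybit() submits the batch and returns a fence fd created
 * from the C2D timestamp, so the caller (typically HWC) can synchronize on
 * completion without blocking here; the actual wait and surface cleanup are
 * deferred to the c2d_wait_loop() thread signalled above.
 */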

static int finish_copybit(struct copybit_device_t *dev)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    if (!ctx)
        return COPYBIT_FAILURE;

    int status = msm_copybit(ctx, ctx->dst[ctx->dst_surface_type]);

    if(LINK_c2dFinish(ctx->dst[ctx->dst_surface_type])) {
        ALOGE("%s: LINK_c2dFinish ERROR", __FUNCTION__);
        return COPYBIT_FAILURE;
    }

    // Unmap any mapped addresses.
    for (int i = 0; i < MAX_SURFACES; i++) {
        if (ctx->mapped_gpu_addr[i]) {
            LINK_c2dUnMapAddr( (void*)ctx->mapped_gpu_addr[i]);
            ctx->mapped_gpu_addr[i] = 0;
        }
    }

    // Reset the counts after the draw.
    ctx->blit_rgb_count = 0;
    ctx->blit_yuv_2_plane_count = 0;
    ctx->blit_yuv_3_plane_count = 0;
    ctx->blit_count = 0;
    ctx->dst_surface_mapped = false;
    ctx->dst_surface_base = 0;

    return status;
}

static int clear_copybit(struct copybit_device_t *dev,
                         struct copybit_image_t const *buf,
                         struct copybit_rect_t *rect)
{
    int ret = COPYBIT_SUCCESS;
    int flags = FLAGS_PREMULTIPLIED_ALPHA;
    int mapped_dst_idx = -1;
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    C2D_RECT c2drect = {rect->l, rect->t, rect->r - rect->l, rect->b - rect->t};
    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    if(!ctx->dst_surface_mapped) {
        ret = set_image(ctx, ctx->dst[RGB_SURFACE], buf,
                        (eC2DFlags)flags, mapped_dst_idx);
        if(ret) {
            ALOGE("%s: set_image error", __FUNCTION__);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            pthread_mutex_unlock(&ctx->wait_cleanup_lock);
            return COPYBIT_FAILURE;
        }
        //clear_copybit is the first call made by HWC for each composition
        //with the dest surface, hence set dst_surface_mapped.
        ctx->dst_surface_mapped = true;
        ctx->dst_surface_base = buf->base;
        ret = LINK_c2dFillSurface(ctx->dst[RGB_SURFACE], 0x0, &c2drect);
    }
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    return ret;
}


/** setup rectangles */
static void set_rects(struct copybit_context_t *ctx,
                      C2D_OBJECT *c2dObject,
                      const struct copybit_rect_t *dst,
                      const struct copybit_rect_t *src,
                      const struct copybit_rect_t *scissor)
{
    // Set the target rect.
    if((ctx->trg_transform & C2D_TARGET_ROTATE_90) &&
       (ctx->trg_transform & C2D_TARGET_ROTATE_180)) {
        /* target rotation is 270 */
        c2dObject->target_rect.x = (dst->t)<<16;
        c2dObject->target_rect.y = ctx->fb_width?(ALIGN(ctx->fb_width,32)- dst->r):dst->r;
        c2dObject->target_rect.y = c2dObject->target_rect.y<<16;
        c2dObject->target_rect.height = ((dst->r) - (dst->l))<<16;
        c2dObject->target_rect.width = ((dst->b) - (dst->t))<<16;
    } else if(ctx->trg_transform & C2D_TARGET_ROTATE_90) {
        c2dObject->target_rect.x = ctx->fb_height?(ctx->fb_height - dst->b):dst->b;
        c2dObject->target_rect.x = c2dObject->target_rect.x<<16;
        c2dObject->target_rect.y = (dst->l)<<16;
        c2dObject->target_rect.height = ((dst->r) - (dst->l))<<16;
        c2dObject->target_rect.width = ((dst->b) - (dst->t))<<16;
    } else if(ctx->trg_transform & C2D_TARGET_ROTATE_180) {
        c2dObject->target_rect.y = ctx->fb_height?(ctx->fb_height - dst->b):dst->b;
        c2dObject->target_rect.y = c2dObject->target_rect.y<<16;
        c2dObject->target_rect.x = ctx->fb_width?(ALIGN(ctx->fb_width,32) - dst->r):dst->r;
        c2dObject->target_rect.x = c2dObject->target_rect.x<<16;
        c2dObject->target_rect.height = ((dst->b) - (dst->t))<<16;
        c2dObject->target_rect.width = ((dst->r) - (dst->l))<<16;
    } else {
        c2dObject->target_rect.x = (dst->l)<<16;
        c2dObject->target_rect.y = (dst->t)<<16;
        c2dObject->target_rect.height = ((dst->b) - (dst->t))<<16;
        c2dObject->target_rect.width = ((dst->r) - (dst->l))<<16;
    }
    c2dObject->config_mask |= C2D_TARGET_RECT_BIT;

    // Set the source rect
    c2dObject->source_rect.x = (src->l)<<16;
    c2dObject->source_rect.y = (src->t)<<16;
    c2dObject->source_rect.height = ((src->b) - (src->t))<<16;
    c2dObject->source_rect.width = ((src->r) - (src->l))<<16;
    c2dObject->config_mask |= C2D_SOURCE_RECT_BIT;

    // Set the scissor rect
    c2dObject->scissor_rect.x = scissor->l;
    c2dObject->scissor_rect.y = scissor->t;
    c2dObject->scissor_rect.height = (scissor->b) - (scissor->t);
    c2dObject->scissor_rect.width = (scissor->r) - (scissor->l);
    c2dObject->config_mask |= C2D_SCISSOR_RECT_BIT;
}
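
/*
 * The source and target rectangles appear to be expressed in 16.16 fixed
 * point (hence the <<16 shifts above), while the scissor rect stays in
 * integer pixels. The target rect is additionally remapped according to the
 * current target rotation so callers can keep passing display-oriented
 * coordinates.
 */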

/*****************************************************************************/

/** Set a parameter to value */
static int set_parameter_copybit(
    struct copybit_device_t *dev,
    int name,
    int value)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    int status = COPYBIT_SUCCESS;
    if (!ctx) {
        ALOGE("%s: null context", __FUNCTION__);
        return -EINVAL;
    }

    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    switch(name) {
        case COPYBIT_PLANE_ALPHA:
        {
            if (value < 0) value = 0;
            if (value >= 256) value = 255;

            ctx->src_global_alpha = value;
            if (value < 255)
                ctx->config_mask |= C2D_GLOBAL_ALPHA_BIT;
            else
                ctx->config_mask &= ~C2D_GLOBAL_ALPHA_BIT;
        }
        break;
        case COPYBIT_BLEND_MODE:
        {
            if (value == COPYBIT_BLENDING_NONE) {
                ctx->config_mask |= C2D_ALPHA_BLEND_NONE;
                ctx->is_premultiplied_alpha = true;
            } else if (value == COPYBIT_BLENDING_PREMULT) {
                ctx->is_premultiplied_alpha = true;
            } else {
                ctx->config_mask &= ~C2D_ALPHA_BLEND_NONE;
            }
        }
        break;
        case COPYBIT_TRANSFORM:
        {
            unsigned int transform = 0;
            uint32 config_mask = 0;
            config_mask |= C2D_OVERRIDE_GLOBAL_TARGET_ROTATE_CONFIG;
            if((value & 0x7) == COPYBIT_TRANSFORM_ROT_180) {
                transform = C2D_TARGET_ROTATE_180;
                config_mask |= C2D_OVERRIDE_TARGET_ROTATE_180;
            } else if((value & 0x7) == COPYBIT_TRANSFORM_ROT_270) {
                transform = C2D_TARGET_ROTATE_90;
                config_mask |= C2D_OVERRIDE_TARGET_ROTATE_90;
            } else if(value == COPYBIT_TRANSFORM_ROT_90) {
                transform = C2D_TARGET_ROTATE_270;
                config_mask |= C2D_OVERRIDE_TARGET_ROTATE_270;
            } else {
                config_mask |= C2D_OVERRIDE_TARGET_ROTATE_0;
                if(value & COPYBIT_TRANSFORM_FLIP_H) {
                    config_mask |= C2D_MIRROR_H_BIT;
                } else if(value & COPYBIT_TRANSFORM_FLIP_V) {
                    config_mask |= C2D_MIRROR_V_BIT;
                }
            }

            if (ctx->c2d_driver_info.capabilities_mask &
                C2D_DRIVER_SUPPORTS_OVERRIDE_TARGET_ROTATE_OP) {
                ctx->config_mask |= config_mask;
            } else {
                // The transform for this surface does not match the current
                // target transform. Draw all previous surfaces. This will be
                // changed once we have a new mechanism to send different
                // target rotations to c2d.
                finish_copybit(dev);
            }
            ctx->trg_transform = transform;
        }
        break;
        case COPYBIT_FRAMEBUFFER_WIDTH:
            ctx->fb_width = value;
            break;
        case COPYBIT_FRAMEBUFFER_HEIGHT:
            ctx->fb_height = value;
            break;
        case COPYBIT_ROTATION_DEG:
        case COPYBIT_DITHER:
        case COPYBIT_BLUR:
        case COPYBIT_BLIT_TO_FRAMEBUFFER:
            // Do nothing
            break;
        default:
            ALOGE("%s: default case param=0x%x", __FUNCTION__, name);
            status = -EINVAL;
            break;
    }
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    return status;
}

/** Get a static info value */
static int get(struct copybit_device_t *dev, int name)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    int value;

    if (!ctx) {
        ALOGE("%s: null context error", __FUNCTION__);
        return -EINVAL;
    }

    switch(name) {
        case COPYBIT_MINIFICATION_LIMIT:
            value = MAX_SCALE_FACTOR;
            break;
        case COPYBIT_MAGNIFICATION_LIMIT:
            value = MAX_SCALE_FACTOR;
            break;
        case COPYBIT_SCALING_FRAC_BITS:
            value = 32;
            break;
        case COPYBIT_ROTATION_STEP_DEG:
            value = 1;
            break;
        default:
            ALOGE("%s: default case param=0x%x", __FUNCTION__, name);
            value = -EINVAL;
    }
    return value;
}

static int is_alpha(int cformat)
{
    int alpha = 0;
    switch (cformat & 0xFF) {
        case C2D_COLOR_FORMAT_8888_ARGB:
        case C2D_COLOR_FORMAT_8888_RGBA:
        case C2D_COLOR_FORMAT_5551_RGBA:
        case C2D_COLOR_FORMAT_4444_ARGB:
            alpha = 1;
            break;
        default:
            alpha = 0;
            break;
    }

    if(alpha && (cformat&C2D_FORMAT_DISABLE_ALPHA))
        alpha = 0;

    return alpha;
}

/* Function to check if we need a temporary buffer for the blit.
 * This would happen if the requested destination stride and the
 * C2D stride do not match. We ignore RGB buffers, since their
 * stride is always aligned to 32.
 */
static bool need_temp_buffer(struct copybit_image_t const *img)
{
    if (COPYBIT_SUCCESS == is_supported_rgb_format(img->format))
        return false;

    struct private_handle_t* handle = (struct private_handle_t*)img->handle;

    // The width parameter in the handle contains the aligned_w. We check if we
    // need to convert based on this param. YUV formats have bpp=1, so checking
    // if the requested stride is aligned should suffice.
    if (0 == (handle->width)%32) {
        return false;
    }

    return true;
}

/* Function to extract the information from the copybit image and set the corresponding
 * values in the bufferInfo struct.
 */
static void populate_buffer_info(struct copybit_image_t const *img, bufferInfo& info)
{
    info.width = img->w;
    info.height = img->h;
    info.format = img->format;
}

/* Function to get the required size for a particular format, in order for C2D to perform
 * the blit operation.
 */
static size_t get_size(const bufferInfo& info)
{
    size_t size = 0;
    int w = info.width;
    int h = info.height;
    int aligned_w = ALIGN(w, 32);
    switch(info.format) {
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        {
            // Chroma for this format is aligned to 2K.
            size = ALIGN((aligned_w*h), 2048) +
                   ALIGN(aligned_w/2, 32) * (h/2) * 2;
            size = ALIGN(size, 4096);
        } break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        {
            size = aligned_w * h +
                   ALIGN(aligned_w/2, 32) * (h/2) * 2;
            size = ALIGN(size, 4096);
        } break;
        default: break;
    }
    return size;
}
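
/*
 * Illustrative example (not exercised by the code): for a 100x100
 * HAL_PIXEL_FORMAT_YCrCb_420_SP source, aligned_w = ALIGN(100, 32) = 128, so
 * size = 128*100 + ALIGN(64, 32)*50*2 = 12800 + 6400 = 19200, which is then
 * rounded up to ALIGN(19200, 4096) = 20480 bytes for the temporary buffer.
 */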

/* Function to allocate memory for the temporary buffer. This memory is
 * allocated from Ashmem. It is the caller's responsibility to free this
 * memory.
 */
static int get_temp_buffer(const bufferInfo& info, alloc_data& data)
{
    ALOGD("%s E", __FUNCTION__);
    // Alloc memory from system heap
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = get_size(info);
    data.align = getpagesize();
    data.uncached = true;
    int allocFlags = GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP;

    if (sAlloc == 0) {
        sAlloc = gralloc::IAllocController::getInstance();
    }

    if (sAlloc == 0) {
        ALOGE("%s: sAlloc is still NULL", __FUNCTION__);
        return COPYBIT_FAILURE;
    }

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return COPYBIT_FAILURE;
    }

    ALOGD("%s X", __FUNCTION__);
    return err;
}

/* Function to free the temporary allocated memory.*/
static void free_temp_buffer(alloc_data &data)
{
    if (-1 != data.fd) {
        IMemAlloc* memalloc = sAlloc->getAllocator(data.allocType);
        memalloc->free_buffer(data.base, data.size, 0, data.fd);
    }
}

/* Function to perform the software color conversion. Convert the
 * C2D compatible format to the Android compatible format
 */
static int copy_image(private_handle_t *src_handle,
                      struct copybit_image_t const *rhs,
                      eConversionType conversionType)
{
    if (src_handle->fd == -1) {
        ALOGE("%s: src_handle fd is invalid", __FUNCTION__);
        return COPYBIT_FAILURE;
    }

    // Copy the info.
    int ret = COPYBIT_SUCCESS;
    switch(rhs->format) {
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        {
            if (CONVERT_TO_ANDROID_FORMAT == conversionType) {
                return convert_yuv_c2d_to_yuv_android(src_handle, rhs);
            } else {
                return convert_yuv_android_to_yuv_c2d(src_handle, rhs);
            }

        } break;
        default: {
            ALOGE("%s: invalid format 0x%x", __FUNCTION__, rhs->format);
            ret = COPYBIT_FAILURE;
        } break;
    }
    return ret;
}

static void delete_handle(private_handle_t *handle)
{
    if (handle) {
        delete handle;
        handle = 0;
    }
}

static bool need_to_execute_draw(struct copybit_context_t* ctx,
                                 eC2DFlags flags)
{
    if (flags & FLAGS_TEMP_SRC_DST) {
        return true;
    }
    if (flags & FLAGS_YUV_DESTINATION) {
        return true;
    }
    return false;
}
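
/*
 * A draw is forced to complete immediately (see the finish_copybit() call in
 * stretch_copybit_internal()) whenever a temporary source/destination buffer
 * or a YUV destination is involved: a temporary destination has to be
 * converted and copied back to the caller's buffer before the blit can be
 * considered done, so such requests cannot be batched across calls.
 */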

/** do a stretch blit type operation */
static int stretch_copybit_internal(
    struct copybit_device_t *dev,
    struct copybit_image_t const *dst,
    struct copybit_image_t const *src,
    struct copybit_rect_t const *dst_rect,
    struct copybit_rect_t const *src_rect,
    struct copybit_region_t const *region,
    bool enableBlend)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    int status = COPYBIT_SUCCESS;
    int flags = 0;
    int src_surface_type;
    int mapped_src_idx = -1, mapped_dst_idx = -1;
    C2D_OBJECT_STR src_surface;

    if (!ctx) {
        ALOGE("%s: null context error", __FUNCTION__);
        return -EINVAL;
    }

    if (src->w > MAX_DIMENSION || src->h > MAX_DIMENSION) {
        ALOGE("%s: src dimension error", __FUNCTION__);
        return -EINVAL;
    }

    if (dst->w > MAX_DIMENSION || dst->h > MAX_DIMENSION) {
        ALOGE("%s : dst dimension error dst w %d h %d", __FUNCTION__, dst->w,
              dst->h);
        return -EINVAL;
    }

    if (is_valid_destination_format(dst->format) == COPYBIT_FAILURE) {
        ALOGE("%s: Invalid destination format format = 0x%x", __FUNCTION__,
              dst->format);
        return COPYBIT_FAILURE;
    }

    int dst_surface_type;
    if (is_supported_rgb_format(dst->format) == COPYBIT_SUCCESS) {
        dst_surface_type = RGB_SURFACE;
        flags |= FLAGS_PREMULTIPLIED_ALPHA;
    } else if (is_supported_yuv_format(dst->format) == COPYBIT_SUCCESS) {
        int num_planes = get_num_planes(dst->format);
        flags |= FLAGS_YUV_DESTINATION;
        if (num_planes == 2) {
            dst_surface_type = YUV_SURFACE_2_PLANES;
        } else if (num_planes == 3) {
            dst_surface_type = YUV_SURFACE_3_PLANES;
        } else {
            ALOGE("%s: dst number of YUV planes is invalid dst format = 0x%x",
                  __FUNCTION__, dst->format);
            return COPYBIT_FAILURE;
        }
    } else {
        ALOGE("%s: Invalid dst surface format 0x%x", __FUNCTION__,
              dst->format);
        return COPYBIT_FAILURE;
    }

    if (ctx->blit_rgb_count == MAX_RGB_SURFACES ||
        ctx->blit_yuv_2_plane_count == MAX_YUV_2_PLANE_SURFACES ||
        ctx->blit_yuv_3_plane_count == MAX_YUV_3_PLANE_SURFACES ||
        ctx->blit_count == MAX_BLIT_OBJECT_COUNT ||
        ctx->dst_surface_type != dst_surface_type) {
        // we have reached the max. limits of our internal structures or
        // changed the target.
        // Draw the remaining surfaces. We need to do the finish here since
        // we need to free up the surface templates.
        finish_copybit(dev);
    }

    ctx->dst_surface_type = dst_surface_type;

    // Update the destination
    copybit_image_t dst_image;
    dst_image.w = dst->w;
    dst_image.h = dst->h;
    dst_image.format = dst->format;
    dst_image.handle = dst->handle;
    // Check if we need a temp. copy for the destination. We'd need this if the
    // destination width is not aligned to 32. This case occurs for YUV formats.
    // RGB formats are aligned to 32.
    bool need_temp_dst = need_temp_buffer(dst);
    bufferInfo dst_info;
    populate_buffer_info(dst, dst_info);
    private_handle_t* dst_hnd = new private_handle_t(-1, 0, 0, 0, dst_info.format,
                                                     dst_info.width, dst_info.height);
    if (dst_hnd == NULL) {
        ALOGE("%s: dst_hnd is null", __FUNCTION__);
        return COPYBIT_FAILURE;
    }
    if (need_temp_dst) {
        if (get_size(dst_info) != ctx->temp_dst_buffer.size) {
            free_temp_buffer(ctx->temp_dst_buffer);
            // Create a temp buffer and set that as the destination.
            if (COPYBIT_FAILURE == get_temp_buffer(dst_info, ctx->temp_dst_buffer)) {
                ALOGE("%s: get_temp_buffer(dst) failed", __FUNCTION__);
                delete_handle(dst_hnd);
                return COPYBIT_FAILURE;
            }
        }
        dst_hnd->fd = ctx->temp_dst_buffer.fd;
        dst_hnd->size = ctx->temp_dst_buffer.size;
        dst_hnd->flags = ctx->temp_dst_buffer.allocType;
        dst_hnd->base = (int)(ctx->temp_dst_buffer.base);
        dst_hnd->offset = ctx->temp_dst_buffer.offset;
        dst_hnd->gpuaddr = 0;
        dst_image.handle = dst_hnd;
    }
    if(!ctx->dst_surface_mapped) {
        //map the destination surface to GPU address
        status = set_image(ctx, ctx->dst[ctx->dst_surface_type], &dst_image,
                           (eC2DFlags)flags, mapped_dst_idx);
        if(status) {
            ALOGE("%s: dst: set_image error", __FUNCTION__);
            delete_handle(dst_hnd);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            return COPYBIT_FAILURE;
        }
        ctx->dst_surface_mapped = true;
        ctx->dst_surface_base = dst->base;
    } else if(ctx->dst_surface_mapped && ctx->dst_surface_base != dst->base) {
        // Destination surface for the operation should be the same for
        // multiple requests; this check is to catch the case where the
        // destination changes
        ALOGE("%s: a different destination surface!!", __FUNCTION__);
    }

    // Update the source
    flags = 0;
    if(is_supported_rgb_format(src->format) == COPYBIT_SUCCESS) {
        src_surface_type = RGB_SURFACE;
        src_surface = ctx->blit_rgb_object[ctx->blit_rgb_count];
    } else if (is_supported_yuv_format(src->format) == COPYBIT_SUCCESS) {
        int num_planes = get_num_planes(src->format);
        if (num_planes == 2) {
            src_surface_type = YUV_SURFACE_2_PLANES;
            src_surface = ctx->blit_yuv_2_plane_object[ctx->blit_yuv_2_plane_count];
        } else if (num_planes == 3) {
            src_surface_type = YUV_SURFACE_3_PLANES;
1228 src_surface = ctx->blit_yuv_3_plane_object[ctx->blit_yuv_2_plane_count];
        } else {
            ALOGE("%s: src number of YUV planes is invalid src format = 0x%x",
                  __FUNCTION__, src->format);
            delete_handle(dst_hnd);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            return -EINVAL;
        }
    } else {
        ALOGE("%s: Invalid source surface format 0x%x", __FUNCTION__,
              src->format);
        delete_handle(dst_hnd);
        unmap_gpuaddr(ctx, mapped_dst_idx);
        return -EINVAL;
    }

    copybit_image_t src_image;
    src_image.w = src->w;
    src_image.h = src->h;
    src_image.format = src->format;
    src_image.handle = src->handle;

    bool need_temp_src = need_temp_buffer(src);
    bufferInfo src_info;
    populate_buffer_info(src, src_info);
    private_handle_t* src_hnd = new private_handle_t(-1, 0, 0, 0, src_info.format,
                                                     src_info.width, src_info.height);
    if (NULL == src_hnd) {
        ALOGE("%s: src_hnd is null", __FUNCTION__);
        delete_handle(dst_hnd);
        unmap_gpuaddr(ctx, mapped_dst_idx);
        return COPYBIT_FAILURE;
    }
    if (need_temp_src) {
        if (get_size(src_info) != ctx->temp_src_buffer.size) {
            free_temp_buffer(ctx->temp_src_buffer);
            // Create a temp buffer and set that as the destination.
            if (COPYBIT_SUCCESS != get_temp_buffer(src_info,
                                                   ctx->temp_src_buffer)) {
                ALOGE("%s: get_temp_buffer(src) failed", __FUNCTION__);
                delete_handle(dst_hnd);
                delete_handle(src_hnd);
                unmap_gpuaddr(ctx, mapped_dst_idx);
                return COPYBIT_FAILURE;
            }
        }
        src_hnd->fd = ctx->temp_src_buffer.fd;
        src_hnd->size = ctx->temp_src_buffer.size;
        src_hnd->flags = ctx->temp_src_buffer.allocType;
        src_hnd->base = (int)(ctx->temp_src_buffer.base);
        src_hnd->offset = ctx->temp_src_buffer.offset;
        src_hnd->gpuaddr = 0;
        src_image.handle = src_hnd;

        // Copy the source.
        status = copy_image((private_handle_t *)src->handle, &src_image,
                            CONVERT_TO_C2D_FORMAT);
        if (status == COPYBIT_FAILURE) {
            ALOGE("%s:copy_image failed in temp source",__FUNCTION__);
            delete_handle(dst_hnd);
            delete_handle(src_hnd);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            return status;
        }

        // Clean the cache
        IMemAlloc* memalloc = sAlloc->getAllocator(src_hnd->flags);
        if (memalloc->clean_buffer((void *)(src_hnd->base), src_hnd->size,
                                   src_hnd->offset, src_hnd->fd,
                                   gralloc::CACHE_CLEAN)) {
            ALOGE("%s: clean_buffer failed", __FUNCTION__);
            delete_handle(dst_hnd);
            delete_handle(src_hnd);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            return COPYBIT_FAILURE;
        }
    }

    flags |= (ctx->is_premultiplied_alpha) ? FLAGS_PREMULTIPLIED_ALPHA : 0;
    flags |= (ctx->dst_surface_type != RGB_SURFACE) ? FLAGS_YUV_DESTINATION : 0;
    status = set_image(ctx, src_surface.surface_id, &src_image,
                       (eC2DFlags)flags, mapped_src_idx);
    if(status) {
        ALOGE("%s: set_image (src) error", __FUNCTION__);
        delete_handle(dst_hnd);
        delete_handle(src_hnd);
        unmap_gpuaddr(ctx, mapped_dst_idx);
        unmap_gpuaddr(ctx, mapped_src_idx);
        return COPYBIT_FAILURE;
    }

    src_surface.config_mask = C2D_NO_ANTIALIASING_BIT | ctx->config_mask;
    src_surface.global_alpha = ctx->src_global_alpha;
    if (enableBlend) {
        if(src_surface.config_mask & C2D_GLOBAL_ALPHA_BIT) {
            src_surface.config_mask &= ~C2D_ALPHA_BLEND_NONE;
            if(!(src_surface.global_alpha)) {
                // src alpha is zero
                delete_handle(dst_hnd);
                delete_handle(src_hnd);
                unmap_gpuaddr(ctx, mapped_dst_idx);
                unmap_gpuaddr(ctx, mapped_src_idx);
                return COPYBIT_FAILURE;
            }
        }
    } else {
        src_surface.config_mask |= C2D_ALPHA_BLEND_NONE;
    }

    if (src_surface_type == RGB_SURFACE) {
        ctx->blit_rgb_object[ctx->blit_rgb_count] = src_surface;
        ctx->blit_rgb_count++;
    } else if (src_surface_type == YUV_SURFACE_2_PLANES) {
        ctx->blit_yuv_2_plane_object[ctx->blit_yuv_2_plane_count] = src_surface;
        ctx->blit_yuv_2_plane_count++;
    } else {
        ctx->blit_yuv_3_plane_object[ctx->blit_yuv_3_plane_count] = src_surface;
        ctx->blit_yuv_3_plane_count++;
    }

    struct copybit_rect_t clip;
    while ((status == 0) && region->next(region, &clip)) {
        set_rects(ctx, &(src_surface), dst_rect, src_rect, &clip);
        if (ctx->blit_count == MAX_BLIT_OBJECT_COUNT) {
            ALOGW("Reached end of blit count");
            finish_copybit(dev);
        }
        ctx->blit_list[ctx->blit_count] = src_surface;
        ctx->blit_count++;
    }

    // Check if we need to perform an early draw-finish.
    flags |= (need_temp_dst || need_temp_src) ? FLAGS_TEMP_SRC_DST : 0;
    if (need_to_execute_draw(ctx, (eC2DFlags)flags))
    {
        finish_copybit(dev);
    }

    if (need_temp_dst) {
        // copy the temp. destination without the alignment to the actual
        // destination.
        status = copy_image(dst_hnd, dst, CONVERT_TO_ANDROID_FORMAT);
        if (status == COPYBIT_FAILURE) {
            ALOGE("%s:copy_image failed in temp Dest",__FUNCTION__);
            delete_handle(dst_hnd);
            delete_handle(src_hnd);
            unmap_gpuaddr(ctx, mapped_dst_idx);
            unmap_gpuaddr(ctx, mapped_src_idx);
            return status;
        }
        // Clean the cache.
        IMemAlloc* memalloc = sAlloc->getAllocator(dst_hnd->flags);
        memalloc->clean_buffer((void *)(dst_hnd->base), dst_hnd->size,
                               dst_hnd->offset, dst_hnd->fd,
                               gralloc::CACHE_CLEAN);
    }
    delete_handle(dst_hnd);
    delete_handle(src_hnd);

    ctx->is_premultiplied_alpha = false;
    ctx->fb_width = 0;
    ctx->fb_height = 0;
    ctx->config_mask = 0;
    return status;
}
1393
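/* No-op implementation of copybit's set_sync: the acquire fence fd passed in
 * by the caller is neither waited on nor closed here. */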
static int set_sync_copybit(struct copybit_device_t *dev,
                            int acquireFenceFd)
{
    return 0;
}

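/* Scaled blit entry point. wait_cleanup_lock serializes this call against the
 * timestamp-wait/cleanup thread; blending is requested only when a non-zero
 * source global alpha has been configured earlier (ctx->src_global_alpha). */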
static int stretch_copybit(
    struct copybit_device_t *dev,
    struct copybit_image_t const *dst,
    struct copybit_image_t const *src,
    struct copybit_rect_t const *dst_rect,
    struct copybit_rect_t const *src_rect,
    struct copybit_region_t const *region)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    int status = COPYBIT_SUCCESS;
    bool needsBlending = (ctx->src_global_alpha != 0);
    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    status = stretch_copybit_internal(dev, dst, src, dst_rect, src_rect,
                                      region, needsBlending);
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    return status;
}

/** Perform a blit type operation */
static int blit_copybit(
    struct copybit_device_t *dev,
    struct copybit_image_t const *dst,
    struct copybit_image_t const *src,
    struct copybit_region_t const *region)
{
    int status = COPYBIT_SUCCESS;
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    struct copybit_rect_t dr = { 0, 0, (int)dst->w, (int)dst->h };
    struct copybit_rect_t sr = { 0, 0, (int)src->w, (int)src->h };
    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    status = stretch_copybit_internal(dev, dst, src, &dr, &sr, region, false);
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    return status;
}

/*****************************************************************************/

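/* Tear down a copybit context: stop and join the timestamp-wait thread,
 * destroy every pre-created C2D surface, unload libC2D2 and free the context.
 * It is also invoked on the partially initialized context from the error
 * paths in open_copybit(). */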
static void clean_up(copybit_context_t* ctx)
{
    void* ret;
    if (!ctx)
        return;

    // stop the wait_cleanup_thread
    pthread_mutex_lock(&ctx->wait_cleanup_lock);
    ctx->stop_thread = true;
    // Signal waiting thread
    pthread_cond_signal(&ctx->wait_cleanup_cond);
    pthread_mutex_unlock(&ctx->wait_cleanup_lock);
    // waits for the cleanup thread to exit
    pthread_join(ctx->wait_thread_id, &ret);
    pthread_mutex_destroy(&ctx->wait_cleanup_lock);
    pthread_cond_destroy(&ctx->wait_cleanup_cond);

    for (int i = 0; i < NUM_SURFACE_TYPES; i++) {
        if (ctx->dst[i])
            LINK_c2dDestroySurface(ctx->dst[i]);
    }

    for (int i = 0; i < MAX_RGB_SURFACES; i++) {
        if (ctx->blit_rgb_object[i].surface_id)
            LINK_c2dDestroySurface(ctx->blit_rgb_object[i].surface_id);
    }

    for (int i = 0; i < MAX_YUV_2_PLANE_SURFACES; i++) {
        if (ctx->blit_yuv_2_plane_object[i].surface_id)
            LINK_c2dDestroySurface(ctx->blit_yuv_2_plane_object[i].surface_id);
    }

    for (int i = 0; i < MAX_YUV_3_PLANE_SURFACES; i++) {
        if (ctx->blit_yuv_3_plane_object[i].surface_id)
            LINK_c2dDestroySurface(ctx->blit_yuv_3_plane_object[i].surface_id);
    }

    if (ctx->libc2d2) {
        ::dlclose(ctx->libc2d2);
        ALOGV("dlclose(libc2d2)");
    }

    free(ctx);
}

/** Close the copybit device */
static int close_copybit(struct hw_device_t *dev)
{
    struct copybit_context_t* ctx = (struct copybit_context_t*)dev;
    if (ctx) {
        free_temp_buffer(ctx->temp_src_buffer);
        free_temp_buffer(ctx->temp_dst_buffer);
    }
    clean_up(ctx);
    return 0;
}

/** Open a new instance of a copybit device using name */
static int open_copybit(const struct hw_module_t* module, const char* name,
                        struct hw_device_t** device)
{
    int status = COPYBIT_SUCCESS;
    C2D_RGB_SURFACE_DEF surfDefinition = {0};
    C2D_YUV_SURFACE_DEF yuvSurfaceDef = {0};
    struct copybit_context_t *ctx;
    char fbName[64];

    ctx = (struct copybit_context_t *)malloc(sizeof(struct copybit_context_t));
    if(!ctx) {
        ALOGE("%s: malloc failed", __FUNCTION__);
        return COPYBIT_FAILURE;
    }

    /* initialize drawstate */
    memset(ctx, 0, sizeof(*ctx));
    ctx->libc2d2 = ::dlopen("libC2D2.so", RTLD_NOW);
    if (!ctx->libc2d2) {
        ALOGE("FATAL ERROR: could not dlopen libC2D2.so: %s", dlerror());
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }
    *(void **)&LINK_c2dCreateSurface = ::dlsym(ctx->libc2d2,
                                               "c2dCreateSurface");
    *(void **)&LINK_c2dUpdateSurface = ::dlsym(ctx->libc2d2,
                                               "c2dUpdateSurface");
    *(void **)&LINK_c2dReadSurface = ::dlsym(ctx->libc2d2,
                                             "c2dReadSurface");
    *(void **)&LINK_c2dDraw = ::dlsym(ctx->libc2d2, "c2dDraw");
    *(void **)&LINK_c2dFlush = ::dlsym(ctx->libc2d2, "c2dFlush");
    *(void **)&LINK_c2dFinish = ::dlsym(ctx->libc2d2, "c2dFinish");
    *(void **)&LINK_c2dWaitTimestamp = ::dlsym(ctx->libc2d2,
                                               "c2dWaitTimestamp");
    *(void **)&LINK_c2dDestroySurface = ::dlsym(ctx->libc2d2,
                                                "c2dDestroySurface");
    *(void **)&LINK_c2dMapAddr = ::dlsym(ctx->libc2d2,
                                         "c2dMapAddr");
    *(void **)&LINK_c2dUnMapAddr = ::dlsym(ctx->libc2d2,
                                           "c2dUnMapAddr");
    *(void **)&LINK_c2dGetDriverCapabilities = ::dlsym(ctx->libc2d2,
                                                       "c2dGetDriverCapabilities");
    *(void **)&LINK_c2dCreateFenceFD = ::dlsym(ctx->libc2d2,
                                               "c2dCreateFenceFD");
    *(void **)&LINK_c2dFillSurface = ::dlsym(ctx->libc2d2,
                                             "c2dFillSurface");

    if (!LINK_c2dCreateSurface || !LINK_c2dUpdateSurface || !LINK_c2dReadSurface
        || !LINK_c2dDraw || !LINK_c2dFlush || !LINK_c2dWaitTimestamp ||
        !LINK_c2dFinish || !LINK_c2dDestroySurface ||
        !LINK_c2dGetDriverCapabilities || !LINK_c2dCreateFenceFD ||
        !LINK_c2dFillSurface) {
        ALOGE("%s: dlsym ERROR", __FUNCTION__);
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    ctx->device.common.tag = HARDWARE_DEVICE_TAG;
    ctx->device.common.version = 1;
    ctx->device.common.module = (hw_module_t*)(module);
    ctx->device.common.close = close_copybit;
    ctx->device.set_parameter = set_parameter_copybit;
    ctx->device.get = get;
    ctx->device.blit = blit_copybit;
    ctx->device.set_sync = set_sync_copybit;
    ctx->device.stretch = stretch_copybit;
    ctx->device.finish = finish_copybit;
    ctx->device.flush_get_fence = flush_get_fence_copybit;
    ctx->device.clear = clear_copybit;

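    /*
     * The surfaces created below are small placeholders: the buffer and
     * physical-address fields hold dummy values and the surfaces are created
     * with C2D_SURFACE_WITH_PHYS_DUMMY. The real buffer addresses, strides
     * and dimensions are expected to be supplied per blit (via
     * c2dUpdateSurface) before drawing.
     */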
    /* Create RGB Surface */
    surfDefinition.buffer = (void*)0xdddddddd;
    surfDefinition.phys = (void*)0xdddddddd;
    surfDefinition.stride = 1 * 4;
    surfDefinition.width = 1;
    surfDefinition.height = 1;
    surfDefinition.format = C2D_COLOR_FORMAT_8888_ARGB;
    if (LINK_c2dCreateSurface(&(ctx->dst[RGB_SURFACE]), C2D_TARGET | C2D_SOURCE,
                              (C2D_SURFACE_TYPE)(C2D_SURFACE_RGB_HOST |
                                                 C2D_SURFACE_WITH_PHYS |
                                                 C2D_SURFACE_WITH_PHYS_DUMMY),
                              &surfDefinition)) {
        ALOGE("%s: create ctx->dst_surface[RGB_SURFACE] failed", __FUNCTION__);
        ctx->dst[RGB_SURFACE] = 0;
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    unsigned int surface_id = 0;
    for (int i = 0; i < MAX_RGB_SURFACES; i++)
    {
        if (LINK_c2dCreateSurface(&surface_id, C2D_TARGET | C2D_SOURCE,
                                  (C2D_SURFACE_TYPE)(C2D_SURFACE_RGB_HOST |
                                                     C2D_SURFACE_WITH_PHYS |
                                                     C2D_SURFACE_WITH_PHYS_DUMMY),
                                  &surfDefinition)) {
            ALOGE("%s: create RGB source surface %d failed", __FUNCTION__, i);
            ctx->blit_rgb_object[i].surface_id = 0;
            status = COPYBIT_FAILURE;
            break;
        } else {
            ctx->blit_rgb_object[i].surface_id = surface_id;
            ALOGW("%s i = %d surface_id=%d", __FUNCTION__, i,
                  ctx->blit_rgb_object[i].surface_id);
        }
    }

    if (status == COPYBIT_FAILURE) {
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    // Create 2 plane YUV surfaces
    yuvSurfaceDef.format = C2D_COLOR_FORMAT_420_NV12;
    yuvSurfaceDef.width = 4;
    yuvSurfaceDef.height = 4;
    yuvSurfaceDef.plane0 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.phys0 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.stride0 = 4;

    yuvSurfaceDef.plane1 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.phys1 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.stride1 = 4;
    if (LINK_c2dCreateSurface(&(ctx->dst[YUV_SURFACE_2_PLANES]),
                              C2D_TARGET | C2D_SOURCE,
                              (C2D_SURFACE_TYPE)(C2D_SURFACE_YUV_HOST |
                                                 C2D_SURFACE_WITH_PHYS |
                                                 C2D_SURFACE_WITH_PHYS_DUMMY),
                              &yuvSurfaceDef)) {
        ALOGE("%s: create ctx->dst[YUV_SURFACE_2_PLANES] failed", __FUNCTION__);
        ctx->dst[YUV_SURFACE_2_PLANES] = 0;
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    for (int i = 0; i < MAX_YUV_2_PLANE_SURFACES; i++)
    {
        if (LINK_c2dCreateSurface(&surface_id, C2D_TARGET | C2D_SOURCE,
                                  (C2D_SURFACE_TYPE)(C2D_SURFACE_YUV_HOST |
                                                     C2D_SURFACE_WITH_PHYS |
                                                     C2D_SURFACE_WITH_PHYS_DUMMY),
                                  &yuvSurfaceDef)) {
            ALOGE("%s: create YUV source %d failed", __FUNCTION__, i);
            ctx->blit_yuv_2_plane_object[i].surface_id = 0;
            status = COPYBIT_FAILURE;
            break;
        } else {
            ctx->blit_yuv_2_plane_object[i].surface_id = surface_id;
            ALOGW("%s: 2 Plane YUV i=%d surface_id=%d", __FUNCTION__, i,
                  ctx->blit_yuv_2_plane_object[i].surface_id);
        }
    }

    if (status == COPYBIT_FAILURE) {
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    // Create YUV 3 plane surfaces
    yuvSurfaceDef.format = C2D_COLOR_FORMAT_420_YV12;
    yuvSurfaceDef.plane2 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.phys2 = (void*)0xaaaaaaaa;
    yuvSurfaceDef.stride2 = 4;

    if (LINK_c2dCreateSurface(&(ctx->dst[YUV_SURFACE_3_PLANES]),
                              C2D_TARGET | C2D_SOURCE,
                              (C2D_SURFACE_TYPE)(C2D_SURFACE_YUV_HOST |
                                                 C2D_SURFACE_WITH_PHYS |
                                                 C2D_SURFACE_WITH_PHYS_DUMMY),
                              &yuvSurfaceDef)) {
        ALOGE("%s: create ctx->dst[YUV_SURFACE_3_PLANES] failed", __FUNCTION__);
        ctx->dst[YUV_SURFACE_3_PLANES] = 0;
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

    for (int i = 0; i < MAX_YUV_3_PLANE_SURFACES; i++)
    {
        if (LINK_c2dCreateSurface(&surface_id,
                                  C2D_TARGET | C2D_SOURCE,
                                  (C2D_SURFACE_TYPE)(C2D_SURFACE_YUV_HOST |
                                                     C2D_SURFACE_WITH_PHYS |
                                                     C2D_SURFACE_WITH_PHYS_DUMMY),
                                  &yuvSurfaceDef)) {
            ALOGE("%s: create 3 plane YUV surface %d failed", __FUNCTION__, i);
            ctx->blit_yuv_3_plane_object[i].surface_id = 0;
            status = COPYBIT_FAILURE;
            break;
        } else {
            ctx->blit_yuv_3_plane_object[i].surface_id = surface_id;
            ALOGW("%s: 3 Plane YUV i=%d surface_id=%d", __FUNCTION__, i,
                  ctx->blit_yuv_3_plane_object[i].surface_id);
        }
    }

    if (status == COPYBIT_FAILURE) {
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }

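    /*
     * Query the C2D driver capabilities once at open time; the result is
     * cached in ctx->c2d_driver_info for later use. Failure here is fatal
     * for the device.
     */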
    if (LINK_c2dGetDriverCapabilities(&(ctx->c2d_driver_info))) {
        ALOGE("%s: LINK_c2dGetDriverCapabilities failed", __FUNCTION__);
        clean_up(ctx);
        status = COPYBIT_FAILURE;
        *device = NULL;
        return status;
    }
    // Initialize context variables.
    ctx->trg_transform = C2D_TARGET_ROTATE_0;

    ctx->temp_src_buffer.fd = -1;
    ctx->temp_src_buffer.base = 0;
    ctx->temp_src_buffer.size = 0;

    ctx->temp_dst_buffer.fd = -1;
    ctx->temp_dst_buffer.base = 0;
    ctx->temp_dst_buffer.size = 0;

    ctx->fb_width = 0;
    ctx->fb_height = 0;

    ctx->blit_rgb_count = 0;
    ctx->blit_yuv_2_plane_count = 0;
    ctx->blit_yuv_3_plane_count = 0;
    ctx->blit_count = 0;

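    /*
     * The worker started below runs c2d_wait_loop() (defined earlier in this
     * file): it blocks on wait_cleanup_cond, handles deferred timestamp
     * waits/cleanup when signalled, and exits once stop_thread is set.
     * clean_up() signals and joins it, hence the joinable attribute.
     */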
    ctx->wait_timestamp = false;
    ctx->stop_thread = false;
    pthread_mutex_init(&(ctx->wait_cleanup_lock), NULL);
    pthread_cond_init(&(ctx->wait_cleanup_cond), NULL);
    /* Start the wait thread */
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

    pthread_create(&ctx->wait_thread_id, &attr, &c2d_wait_loop,
                   (void *)ctx);
    pthread_attr_destroy(&attr);

    *device = &ctx->device.common;
    return status;
}