/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
struct I420Buffers {
  uint8* y;
  int y_stride;
  uint8* u;
  int u_stride;
  uint8* v;
  int v_stride;
  int w;
  int h;
};
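
// The Jpeg*ToI420 functions below are row callbacks for
// MJpegDecoder::DecodeToCallback(): the decoder hands each one a group of
// freshly decoded rows, the callback converts that group into the I420
// destination, and then advances the destination pointers (full rows for Y,
// half rows for U and V) so the next group lands immediately below.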
static void JpegCopyI420(void* opaque,
                         const uint8* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = static_cast<I420Buffers*>(opaque);
  I420Copy(data[0], strides[0],
           data[1], strides[1],
           data[2], strides[2],
           dest->y, dest->y_stride,
           dest->u, dest->u_stride,
           dest->v, dest->v_stride,
           dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = static_cast<I420Buffers*>(opaque);
  I422ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = static_cast<I420Buffers*>(opaque);
  I444ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI411ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = static_cast<I420Buffers*>(opaque);
  I411ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = static_cast<I420Buffers*>(opaque);
  I400ToI420(data[0], strides[0],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}
// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8* sample, size_t sample_size,
             int* width, int* height) {
  MJpegDecoder mjpeg_decoder;
  bool ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}
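
// A minimal usage sketch for MJPGSize (jpeg_data and jpeg_size are
// illustrative names for one encoded frame and its byte length):
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &width, &height) == 0) {
//     // width/height now describe the encoded frame; size the destination
//     // planes from them before calling MJPGToI420 or MJPGToARGB below.
//   }
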
// MJPG (Motion JPEG) to I420.
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToI420(const uint8* sample,
               size_t sample_size,
               uint8* y, int y_stride,
               uint8* u, int u_stride,
               uint8* v, int v_stride,
               int w, int h,
               int dw, int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  bool ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != w ||
              mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = { y, y_stride, u, u_stride, v, v_stride, dw, dh };
    // YUV420
    if (mjpeg_decoder.GetColorSpace() ==
            MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dw, dh);
    // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dw, dh);
    // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dw, dh);
    // YUV411
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 4 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI411ToI420, &bufs, dw, dh);
    // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice. 411 is supported by libjpeg.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;  // Report failure if LoadFrame or the decode failed.
}
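
// A minimal usage sketch for MJPGToI420, assuming jpeg_data/jpeg_size hold one
// encoded frame and width/height came from MJPGSize above (the names and the
// malloc-based allocation are illustrative, not part of the API):
//   uint8* y = static_cast<uint8*>(malloc(width * height));
//   uint8* u = static_cast<uint8*>(malloc(((width + 1) / 2) * ((height + 1) / 2)));
//   uint8* v = static_cast<uint8*>(malloc(((width + 1) / 2) * ((height + 1) / 2)));
//   int r = MJPGToI420(jpeg_data, jpeg_size,
//                      y, width,            // Y plane and its stride.
//                      u, (width + 1) / 2,  // U plane; chroma is half width.
//                      v, (width + 1) / 2,  // V plane; chroma is half width.
//                      width, height,       // w/h: expected encoded size.
//                      width, height);      // dw/dh: destination size.
//   // r == 0 on success; nonzero means the frame failed to load, had
//   // unexpected dimensions, or used an unsupported sampling layout.
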
#ifdef HAVE_JPEG
struct ARGBBuffers {
  uint8* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = static_cast<ARGBBuffers*>(opaque);
  I420ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = static_cast<ARGBBuffers*>(opaque);
  I422ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = static_cast<ARGBBuffers*>(opaque);
  I444ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI411ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = static_cast<ARGBBuffers*>(opaque);
  I411ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = static_cast<ARGBBuffers*>(opaque);
  I400ToARGB(data[0], strides[0],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}
// MJPG (Motion JPEG) to ARGB.
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToARGB(const uint8* sample,
               size_t sample_size,
               uint8* argb, int argb_stride,
               int w, int h,
               int dw, int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  bool ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != w ||
              mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = { argb, argb_stride, dw, dh };
    // YUV420
    if (mjpeg_decoder.GetColorSpace() ==
            MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dw, dh);
    // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dw, dh);
    // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dw, dh);
    // YUV411
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 4 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI411ToARGB, &bufs, dw, dh);
    // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice. 411 is supported by libjpeg.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;  // Report failure if LoadFrame or the decode failed.
}
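
// A minimal usage sketch for MJPGToARGB, mirroring the I420 example above
// (buffer names are illustrative; ARGB output is 4 bytes per pixel, so the
// stride below is width * 4):
//   uint8* argb = static_cast<uint8*>(malloc(width * height * 4));
//   int r = MJPGToARGB(jpeg_data, jpeg_size,
//                      argb, width * 4,
//                      width, height,   // w/h: expected encoded size.
//                      width, height);  // dw/dh: destination size.
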
#endif

#endif

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif