/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "sdk/android/src/jni/videoencoderwrapper.h"

#include <utility>

#include "common_video/h264/h264_common.h"
#include "jni/VideoEncoder_jni.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/logging.h"
#include "rtc_base/random.h"
#include "rtc_base/timeutils.h"
#include "sdk/android/src/jni/classreferenceholder.h"

namespace webrtc {
namespace jni {

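// Maximum number of times the Java encoder is reset after an error before the
// wrapper gives up and requests software fallback (see HandleReturnCode).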
static const int kMaxJavaEncoderResets = 3;

VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
    : encoder_(jni, j_encoder),
      settings_class_(jni, FindClass(jni, "org/webrtc/VideoEncoder$Settings")),
      encode_info_class_(jni,
                         FindClass(jni, "org/webrtc/VideoEncoder$EncodeInfo")),
      frame_type_class_(jni,
                        FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
      bitrate_allocation_class_(
          jni,
          FindClass(jni, "org/webrtc/VideoEncoder$BitrateAllocation")),
      int_array_class_(jni, jni->FindClass("[I")),
      video_frame_factory_(jni) {
  settings_constructor_ =
      jni->GetMethodID(*settings_class_, "<init>", "(IIIIIZ)V");

  encode_info_constructor_ = jni->GetMethodID(
      *encode_info_class_, "<init>", "([Lorg/webrtc/EncodedImage$FrameType;)V");

  frame_type_from_native_method_ =
      jni->GetStaticMethodID(*frame_type_class_, "fromNative",
                             "(I)Lorg/webrtc/EncodedImage$FrameType;");

  bitrate_allocation_constructor_ =
      jni->GetMethodID(*bitrate_allocation_class_, "<init>", "([[I)V");

  jclass video_codec_status_class =
      FindClass(jni, "org/webrtc/VideoCodecStatus");
  get_number_method_ =
      jni->GetMethodID(video_codec_status_class, "getNumber", "()I");

  jclass integer_class = jni->FindClass("java/lang/Integer");
  int_value_method_ = jni->GetMethodID(integer_class, "intValue", "()I");

  jclass scaling_settings_class =
      FindClass(jni, "org/webrtc/VideoEncoder$ScalingSettings");
  scaling_settings_on_field_ =
      jni->GetFieldID(scaling_settings_class, "on", "Z");
  scaling_settings_low_field_ =
      jni->GetFieldID(scaling_settings_class, "low", "Ljava/lang/Integer;");
  scaling_settings_high_field_ =
      jni->GetFieldID(scaling_settings_class, "high", "Ljava/lang/Integer;");

  implementation_name_ = GetImplementationName(jni);

  encoder_queue_ = rtc::TaskQueue::Current();

  initialized_ = false;
  num_resets_ = 0;

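  // The Java encoder does not supply codec-specific info, so the wrapper
  // tracks VP8/VP9 picture IDs itself; start them at random values.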
  Random random(rtc::TimeMicros());
  picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
  tl0_pic_idx_ = random.Rand<uint8_t>();
}

int32_t VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
                                        int32_t number_of_cores,
                                        size_t max_payload_size) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  number_of_cores_ = number_of_cores;
  codec_settings_ = *codec_settings;
  num_resets_ = 0;

  return InitEncodeInternal(jni);
}

int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
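  // Only VP8 and VP9 expose automaticResizeOn in the codec settings; for
  // other codecs, assume resizing is allowed.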
  bool automatic_resize_on;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
      break;
    case kVideoCodecVP9:
      automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
      break;
    default:
      automatic_resize_on = true;
  }

  jobject settings =
      jni->NewObject(*settings_class_, settings_constructor_, number_of_cores_,
                     codec_settings_.width, codec_settings_.height,
                     codec_settings_.startBitrate, codec_settings_.maxFramerate,
                     automatic_resize_on);

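  // The callback object carries a pointer back to this wrapper; the Java
  // encoder delivers encoded frames through it (see OnEncodedFrame below).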
  jclass callback_class =
      FindClass(jni, "org/webrtc/VideoEncoderWrapperCallback");
  jmethodID callback_constructor =
      jni->GetMethodID(callback_class, "<init>", "(J)V");
  jobject callback = jni->NewObject(callback_class, callback_constructor,
                                    jlongFromPointer(this));

  jobject ret =
      Java_VideoEncoder_initEncode(jni, *encoder_, settings, callback);

  if (jni->CallIntMethod(ret, get_number_method_) == WEBRTC_VIDEO_CODEC_OK) {
    initialized_ = true;
  }

  return HandleReturnCode(jni, ret);
}

int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t VideoEncoderWrapper::Release() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject ret = Java_VideoEncoder_release(jni, *encoder_);
  frame_extra_infos_.clear();
  initialized_ = false;
  return HandleReturnCode(jni, ret);
}

int32_t VideoEncoderWrapper::Encode(
    const VideoFrame& frame,
    const CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<FrameType>* frame_types) {
  if (!initialized_) {
    // Most likely initializing the codec failed.
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }

  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // Construct encode info.
  jobjectArray j_frame_types =
      jni->NewObjectArray(frame_types->size(), *frame_type_class_, nullptr);
  for (size_t i = 0; i < frame_types->size(); ++i) {
    jobject j_frame_type = jni->CallStaticObjectMethod(
        *frame_type_class_, frame_type_from_native_method_,
        static_cast<jint>((*frame_types)[i]));
    jni->SetObjectArrayElement(j_frame_types, i, j_frame_type);
  }
  jobject encode_info = jni->NewObject(*encode_info_class_,
                                       encode_info_constructor_, j_frame_types);

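  // Remember the RTP timestamp and capture time so they can be matched to the
  // encoded frame that the Java encoder delivers asynchronously.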
  FrameExtraInfo info;
  info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
  info.timestamp_rtp = frame.timestamp();
  frame_extra_infos_.push_back(info);

  jobject ret = Java_VideoEncoder_encode(
      jni, *encoder_, video_frame_factory_.ToJavaFrame(jni, frame),
      encode_info);
  return HandleReturnCode(jni, ret);
}

int32_t VideoEncoderWrapper::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject ret = Java_VideoEncoder_setChannelParameters(
      jni, *encoder_, (jshort)packet_loss, (jlong)rtt);
  return HandleReturnCode(jni, ret);
}

int32_t VideoEncoderWrapper::SetRateAllocation(
    const BitrateAllocation& allocation,
    uint32_t framerate) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  jobject j_bitrate_allocation = ToJavaBitrateAllocation(jni, allocation);
  jobject ret = Java_VideoEncoder_setRateAllocation(
      jni, *encoder_, j_bitrate_allocation, (jint)framerate);
  return HandleReturnCode(jni, ret);
}

VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
    const {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jobject j_scaling_settings =
      Java_VideoEncoder_getScalingSettings(jni, *encoder_);
  bool on =
      jni->GetBooleanField(j_scaling_settings, scaling_settings_on_field_);
  jobject j_low =
      jni->GetObjectField(j_scaling_settings, scaling_settings_low_field_);
  jobject j_high =
      jni->GetObjectField(j_scaling_settings, scaling_settings_high_field_);

  if (j_low != nullptr || j_high != nullptr) {
    RTC_DCHECK(j_low != nullptr);
    RTC_DCHECK(j_high != nullptr);
    int low = jni->CallIntMethod(j_low, int_value_method_);
    int high = jni->CallIntMethod(j_high, int_value_method_);
    return ScalingSettings(on, low, high);
  } else {
    return ScalingSettings(on);
  }
}

const char* VideoEncoderWrapper::ImplementationName() const {
  return implementation_name_.c_str();
}

void VideoEncoderWrapper::OnEncodedFrame(JNIEnv* jni,
                                         jobject j_buffer,
                                         jint encoded_width,
                                         jint encoded_height,
                                         jlong capture_time_ns,
                                         jint frame_type,
                                         jint rotation,
                                         jboolean complete_frame,
                                         jobject j_qp) {
  const uint8_t* buffer =
      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer));
  const size_t buffer_size = jni->GetDirectBufferCapacity(j_buffer);

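  // Copy the payload out of the Java-owned direct buffer; the task below runs
  // asynchronously and the buffer may be reclaimed on the Java side after this
  // call returns.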
  std::vector<uint8_t> buffer_copy(buffer_size);
  memcpy(buffer_copy.data(), buffer, buffer_size);
  int qp = -1;
  if (j_qp != nullptr) {
    qp = jni->CallIntMethod(j_qp, int_value_method_);
  }

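  // This callback arrives on a Java thread; post the remaining work to the
  // encoder's task queue so frame_extra_infos_ is only touched on that queue.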
  encoder_queue_->PostTask(
      [
        this, task_buffer = std::move(buffer_copy), qp, encoded_width,
        encoded_height, capture_time_ns, frame_type, rotation, complete_frame
      ]() {
        FrameExtraInfo frame_extra_info;
        do {
          if (frame_extra_infos_.empty()) {
            LOG(LS_WARNING)
                << "Java encoder produced an unexpected frame with timestamp: "
                << capture_time_ns;
            return;
          }

          frame_extra_info = frame_extra_infos_.front();
          frame_extra_infos_.pop_front();
          // The encoder might drop frames so iterate through the queue until
          // we find a matching timestamp.
        } while (frame_extra_info.capture_time_ns != capture_time_ns);

        RTPFragmentationHeader header = ParseFragmentationHeader(task_buffer);
        EncodedImage frame(const_cast<uint8_t*>(task_buffer.data()),
                           task_buffer.size(), task_buffer.size());
        frame._encodedWidth = encoded_width;
        frame._encodedHeight = encoded_height;
        frame._timeStamp = frame_extra_info.timestamp_rtp;
        frame.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
        frame._frameType = (FrameType)frame_type;
        frame.rotation_ = (VideoRotation)rotation;
        frame._completeFrame = complete_frame;
        if (qp == -1) {
          frame.qp_ = ParseQp(task_buffer);
        } else {
          frame.qp_ = qp;
        }

        CodecSpecificInfo info(ParseCodecSpecificInfo(frame));
        callback_->OnEncodedImage(frame, &info, &header);
      });
}

int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {
  int32_t value = jni->CallIntMethod(code, get_number_method_);
  if (value < 0) {  // Any errors are represented by negative values.
    // Try resetting the codec.
    if (++num_resets_ <= kMaxJavaEncoderResets &&
        Release() == WEBRTC_VIDEO_CODEC_OK) {
      LOG(LS_WARNING) << "Reset Java encoder: " << num_resets_;
      return InitEncodeInternal(jni);
    }

    LOG(LS_WARNING) << "Falling back to software encoder.";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  } else {
    return value;
  }
}

RTPFragmentationHeader VideoEncoderWrapper::ParseFragmentationHeader(
    const std::vector<uint8_t>& buffer) {
  RTPFragmentationHeader header;
  if (codec_settings_.codecType == kVideoCodecH264) {
    h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size());

    // For H.264 search for start codes.
    const std::vector<H264::NaluIndex> nalu_idxs =
        H264::FindNaluIndices(buffer.data(), buffer.size());
    if (nalu_idxs.empty()) {
      LOG(LS_ERROR) << "Start code is not found!";
      LOG(LS_ERROR) << "Data:" << buffer[0] << " " << buffer[1] << " "
                    << buffer[2] << " " << buffer[3] << " " << buffer[4] << " "
                    << buffer[5];
    }
    header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
    for (size_t i = 0; i < nalu_idxs.size(); i++) {
      header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset;
      header.fragmentationLength[i] = nalu_idxs[i].payload_size;
      header.fragmentationPlType[i] = 0;
      header.fragmentationTimeDiff[i] = 0;
    }
  } else {
    // Generate a header describing a single fragment.
    header.VerifyAndAllocateFragmentationHeader(1);
    header.fragmentationOffset[0] = 0;
    header.fragmentationLength[0] = buffer.size();
    header.fragmentationPlType[0] = 0;
    header.fragmentationTimeDiff[0] = 0;
  }
  return header;
}

int VideoEncoderWrapper::ParseQp(const std::vector<uint8_t>& buffer) {
  int qp;
  bool success;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      success = vp8::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecVP9:
      success = vp9::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecH264:
      success = h264_bitstream_parser_.GetLastSliceQp(&qp);
      break;
    default:  // Default is to not provide QP.
      success = false;
      break;
  }
  return success ? qp : -1;  // -1 means unknown QP.
}

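// The Java encoder interface does not expose codec-specific info, so
// reconstruct reasonable VP8/VP9 metadata here (single stream, no temporal or
// spatial layers).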
CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
    const EncodedImage& frame) {
  const bool key_frame = frame._frameType == kVideoFrameKey;

  CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = codec_settings_.codecType;
  info.codec_name = implementation_name_.c_str();

  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      info.codecSpecific.VP8.pictureId = picture_id_;
      info.codecSpecific.VP8.nonReference = false;
      info.codecSpecific.VP8.simulcastIdx = 0;
      info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
      info.codecSpecific.VP8.layerSync = false;
      info.codecSpecific.VP8.tl0PicIdx = kNoTl0PicIdx;
      info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
      break;
    case kVideoCodecVP9:
      if (key_frame) {
        gof_idx_ = 0;
      }
      info.codecSpecific.VP9.picture_id = picture_id_;
      info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true;
      info.codecSpecific.VP9.flexible_mode = false;
      info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
      info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
      info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
      info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
      info.codecSpecific.VP9.temporal_up_switch = true;
      info.codecSpecific.VP9.inter_layer_predicted = false;
      info.codecSpecific.VP9.gof_idx =
          static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
      info.codecSpecific.VP9.num_spatial_layers = 1;
      info.codecSpecific.VP9.spatial_layer_resolution_present = false;
      if (info.codecSpecific.VP9.ss_data_available) {
        info.codecSpecific.VP9.spatial_layer_resolution_present = true;
        info.codecSpecific.VP9.width[0] = frame._encodedWidth;
        info.codecSpecific.VP9.height[0] = frame._encodedHeight;
        info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
      }
      break;
    default:
      break;
  }

  picture_id_ = (picture_id_ + 1) & 0x7FFF;

  return info;
}

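// Converts a native BitrateAllocation into the Java representation: an int[][]
// of bitrates in bps indexed as [spatialLayer][temporalLayer].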
jobject VideoEncoderWrapper::ToJavaBitrateAllocation(
    JNIEnv* jni,
    const BitrateAllocation& allocation) {
  jobjectArray j_allocation_array = jni->NewObjectArray(
      kMaxSpatialLayers, *int_array_class_, nullptr /* initial */);
  for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
    jintArray j_array_spatial_layer = jni->NewIntArray(kMaxTemporalStreams);
    jint* array_spatial_layer =
        jni->GetIntArrayElements(j_array_spatial_layer, nullptr /* isCopy */);
    for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
      array_spatial_layer[temporal_i] =
          allocation.GetBitrate(spatial_i, temporal_i);
    }
    jni->ReleaseIntArrayElements(j_array_spatial_layer, array_spatial_layer,
                                 JNI_COMMIT);

    jni->SetObjectArrayElement(j_allocation_array, spatial_i,
                               j_array_spatial_layer);
  }
  return jni->NewObject(*bitrate_allocation_class_,
                        bitrate_allocation_constructor_, j_allocation_array);
}

std::string VideoEncoderWrapper::GetImplementationName(JNIEnv* jni) const {
  jstring jname = Java_VideoEncoder_getImplementationName(jni, *encoder_);
  return JavaToStdString(jni, jname);
}

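// Entry point called from Java by VideoEncoderWrapperCallback when the encoder
// delivers an encoded frame; forwards the frame to the owning native wrapper.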
JNI_FUNCTION_DECLARATION(void,
                         VideoEncoderWrapperCallback_nativeOnEncodedFrame,
                         JNIEnv* jni,
                         jclass,
                         jlong j_native_encoder,
                         jobject buffer,
                         jint encoded_width,
                         jint encoded_height,
                         jlong capture_time_ns,
                         jint frame_type,
                         jint rotation,
                         jboolean complete_frame,
                         jobject qp) {
  VideoEncoderWrapper* native_encoder =
      reinterpret_cast<VideoEncoderWrapper*>(j_native_encoder);
  native_encoder->OnEncodedFrame(jni, buffer, encoded_width, encoded_height,
                                 capture_time_ns, frame_type, rotation,
                                 complete_frame, qp);
}

}  // namespace jni
}  // namespace webrtc