// [Android WebView] Fix webview perf bot switchover to use org.chromium.webview_shell...
// Source: chromium-blink-merge.git / content/common/gpu/media/android_video_encode_accelerator.cc
// blob bdc4ffbe77d6a7c135206e28fb7af8123202ee79
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/android_video_encode_accelerator.h"
7 #include <set>
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/logging.h"
12 #include "base/message_loop/message_loop.h"
13 #include "base/metrics/histogram.h"
14 #include "content/common/gpu/gpu_channel.h"
15 #include "content/public/common/content_switches.h"
16 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
17 #include "media/base/android/media_codec_bridge.h"
18 #include "media/base/bitstream_buffer.h"
19 #include "media/base/limits.h"
20 #include "media/video/picture.h"
21 #include "third_party/libyuv/include/libyuv/convert_from.h"
22 #include "ui/gl/android/scoped_java_surface.h"
23 #include "ui/gl/gl_bindings.h"
25 using media::MediaCodecBridge;
26 using media::VideoCodecBridge;
27 using media::VideoFrame;
29 namespace content {
// Hard caps applied to Android HW encoding: HW codec initialization is known
// to fail for resolutions above 720p, so stay at or below it.  Values match
// Libjingle's defaults in "jsepsessiondescription.cc".
const int kMaxEncodeFrameWidth = 1280;
const int kMaxEncodeFrameHeight = 720;
const int kMaxFramerateNumerator = 30;
const int kMaxFramerateDenominator = 1;

// MediaCodec color-format codes this encoder can feed — a subset of the
// values defined by Android's MediaCodecInfo.CodecCapabilities.
enum PixelFormat {
  COLOR_FORMAT_YUV420_PLANAR = 19,      // I420 layout.
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,  // NV12 layout.
};
// Helper macros for dealing with failure. If |result| evaluates false, emit
// |log| to DLOG(ERROR), register |error| with the client, and return.
//
// NOTE: this expands inside member functions of AndroidVideoEncodeAccelerator
// and relies on |client_ptr_factory_| being in scope.  After the first failure
// the factory is reset, which (a) invalidates all outstanding weak pointers so
// no further client callbacks can be delivered, and (b) makes subsequent
// failures silent (GetWeakPtr() returns null).  No `//` comments can appear on
// the continuation lines below — they would swallow the trailing backslashes.
#define RETURN_ON_FAILURE(result, log, error)                  \
  do {                                                         \
    if (!(result)) {                                           \
      DLOG(ERROR) << log;                                      \
      if (client_ptr_factory_->GetWeakPtr()) {                 \
        client_ptr_factory_->GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.reset();                           \
      }                                                        \
      return;                                                  \
    }                                                          \
  } while (0)
59 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
60 // has no callback mechanism (b/11990118), we must drive it by polling for
61 // complete frames (and available input buffers, when the codec is fully
62 // saturated). This function defines the polling delay. The value used is an
63 // arbitrary choice that trades off CPU utilization (spinning) against latency.
64 // Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
65 static inline const base::TimeDelta EncodePollDelay() {
66 // An alternative to this polling scheme could be to dedicate a new thread
67 // (instead of using the ChildThread) to run the MediaCodec, and make that
68 // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
69 // believes the codec should complete "soon" (e.g. waiting for an input
70 // buffer, or waiting for a picture when it knows enough complete input
71 // pictures have been fed to saturate any internal buffering). This is
72 // speculative and it's unclear that this would be a win (nor that there's a
73 // reasonably device-agnostic way to fill in the "believes" above).
74 return base::TimeDelta::FromMilliseconds(10);
77 static inline const base::TimeDelta NoWaitTimeOut() {
78 return base::TimeDelta::FromMicroseconds(0);
81 static bool GetSupportedColorFormatForMime(const std::string& mime,
82 PixelFormat* pixel_format) {
83 if (mime.empty())
84 return false;
86 std::set<int> formats = MediaCodecBridge::GetEncoderColorFormats(mime);
87 if (formats.count(COLOR_FORMAT_YUV420_SEMIPLANAR) > 0)
88 *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
89 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0)
90 *pixel_format = COLOR_FORMAT_YUV420_PLANAR;
91 else
92 return false;
94 return true;
// Member initializers only; the real setup (codec creation, buffer sizing)
// happens in Initialize().  |num_output_buffers_| is -1 until Initialize()
// queries the codec for the actual count.
AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
    : num_buffers_at_codec_(0),
      num_output_buffers_(-1),
      output_buffers_capacity_(0),
      last_set_bitrate_(0) {}
// Destruction is normally reached via Destroy() (which ends in "delete
// this"); the destructor itself only asserts correct threading.
AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
107 media::VideoEncodeAccelerator::SupportedProfiles
108 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
109 SupportedProfiles profiles;
111 #if defined(ENABLE_WEBRTC)
112 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
113 if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
114 return profiles;
115 #endif
117 const struct {
118 const media::VideoCodec codec;
119 const media::VideoCodecProfile profile;
120 } kSupportedCodecs[] = {
121 { media::kCodecVP8, media::VP8PROFILE_ANY },
122 { media::kCodecH264, media::H264PROFILE_BASELINE },
123 { media::kCodecH264, media::H264PROFILE_MAIN }
126 for (const auto& supported_codec : kSupportedCodecs) {
127 if (VideoCodecBridge::IsKnownUnaccelerated(supported_codec.codec,
128 media::MEDIA_CODEC_ENCODER)) {
129 continue;
132 SupportedProfile profile;
133 profile.profile = supported_codec.profile;
134 // It would be nice if MediaCodec exposes the maximum capabilities of
135 // the encoder. Hard-code some reasonable defaults as workaround.
136 profile.max_resolution.SetSize(kMaxEncodeFrameWidth,
137 kMaxEncodeFrameHeight);
138 profile.max_framerate_numerator = kMaxFramerateNumerator;
139 profile.max_framerate_denominator = kMaxFramerateDenominator;
140 profiles.push_back(profile);
142 return profiles;
// Creates and starts the MediaCodec encoder.  Returns false (without
// notifying |client|) on any unsupported configuration or codec-creation
// failure; on success, posts RequireBitstreamBuffers() back to |client|.
// Must be called on the same thread as all other entry points.
bool AndroidVideoEncodeAccelerator::Initialize(
    VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  // All client callbacks go through weak pointers so they stop after
  // Destroy() resets the factory.
  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));

  // Only I420 input is supported, and SetVideoBitrate() (used by
  // RequestEncodingParametersChange) requires MediaCodec setParameters.
  if (!(media::MediaCodecBridge::SupportsSetParameters() &&
        format == VideoFrame::I420)) {
    DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile;
    return false;
  }

  // Map the requested profile to a codec + MIME type; reject anything else.
  std::string mime_type;
  media::VideoCodec codec;
  if (output_profile == media::VP8PROFILE_ANY) {
    codec = media::kCodecVP8;
    mime_type = "video/x-vnd.on2.vp8";
  } else if (output_profile == media::H264PROFILE_BASELINE ||
             output_profile == media::H264PROFILE_MAIN) {
    codec = media::kCodecH264;
    mime_type = "video/avc";
  } else {
    return false;
  }

  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec, media::MEDIA_CODEC_ENCODER)) {
    DLOG(ERROR) << "No HW support";
    return false;
  }

  // Prefer NV12, falling back to I420, per device encoder support.
  PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
  if (!GetSupportedColorFormatForMime(mime_type, &pixel_format)) {
    DLOG(ERROR) << "No color format support.";
    return false;
  }
  // INITIAL_FRAMERATE / IFRAME_INTERVAL are defined elsewhere in this file
  // (not visible in this chunk) — TODO(review): confirm their values.
  media_codec_.reset(media::VideoCodecBridge::CreateEncoder(codec,
                                                            input_visible_size,
                                                            initial_bitrate,
                                                            INITIAL_FRAMERATE,
                                                            IFRAME_INTERVAL,
                                                            pixel_format));

  if (!media_codec_) {
    DLOG(ERROR) << "Failed to create/start the codec: "
                << input_visible_size.ToString();
    return false;
  }

  // Record the codec's buffer geometry and ask the client (asynchronously)
  // to provide matching bitstream buffers.
  num_output_buffers_ = media_codec_->GetOutputBuffersCount();
  output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_->GetWeakPtr(),
                 num_output_buffers_,
                 input_visible_size,
                 output_buffers_capacity_));
  return true;
}
218 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
219 if (!io_timer_.IsRunning() &&
220 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
221 io_timer_.Start(FROM_HERE,
222 EncodePollDelay(),
223 this,
224 &AndroidVideoEncodeAccelerator::DoIOTask);
228 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
229 if (io_timer_.IsRunning() &&
230 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
231 io_timer_.Stop();
235 void AndroidVideoEncodeAccelerator::Encode(
236 const scoped_refptr<VideoFrame>& frame,
237 bool force_keyframe) {
238 DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
239 DCHECK(thread_checker_.CalledOnValidThread());
240 RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
241 "Unexpected format",
242 kInvalidArgumentError);
244 // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
245 // we insist on being called with packed frames and no cropping :(
246 RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
247 frame->stride(VideoFrame::kYPlane) &&
248 frame->row_bytes(VideoFrame::kUPlane) ==
249 frame->stride(VideoFrame::kUPlane) &&
250 frame->row_bytes(VideoFrame::kVPlane) ==
251 frame->stride(VideoFrame::kVPlane) &&
252 frame->coded_size() == frame->visible_rect().size(),
253 "Non-packed frame, or visible_rect != coded_size",
254 kInvalidArgumentError);
256 pending_frames_.push(
257 base::MakeTuple(frame, force_keyframe, base::Time::Now()));
258 DoIOTask();
261 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
262 const media::BitstreamBuffer& buffer) {
263 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
264 DCHECK(thread_checker_.CalledOnValidThread());
265 RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
266 "Output buffers too small!",
267 kInvalidArgumentError);
268 available_bitstream_buffers_.push_back(buffer);
269 DoIOTask();
272 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
273 uint32 bitrate,
274 uint32 framerate) {
275 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
276 << ", framerate: " << framerate;
277 DCHECK(thread_checker_.CalledOnValidThread());
278 if (bitrate != last_set_bitrate_) {
279 last_set_bitrate_ = bitrate;
280 media_codec_->SetVideoBitrate(bitrate);
282 // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
283 // framerate, so we ignore that here. This is OK because Android only uses
284 // the framerate value from MediaFormat during configure() as a proxy for
285 // bitrate, and we set that explicitly.
// Tears down the encoder and deletes |this|.  Ordering matters: the weak-ptr
// factory is reset first so no further client callbacks (including
// RETURN_ON_FAILURE notifications) can fire, then the codec is stopped, and
// self-deletion is the very last action.
void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  client_ptr_factory_.reset();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  delete this;
}
// One polling step: feed any pending input to the codec, drain any ready
// output, then reconcile the polling timer with the remaining work (start it
// if work is outstanding, stop it if everything drained).
void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}
// Moves at most one pending frame into a free MediaCodec input buffer,
// converting I420 -> NV12 in the process.  No-op when there are no pending
// frames, the client is gone (errored out), or the codec has no free input
// buffer right now.
void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty())
    return;

  // Non-blocking dequeue: TRY_AGAIN_LATER just means "codec is full"; only a
  // real MEDIA_CODEC_ERROR is fatal.
  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    return;
  }

  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = base::get<1>(input);
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = base::get<0>(input);

  uint8* buffer = NULL;
  size_t capacity = 0;
  media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);

  // The packed-I420 frame must fit entirely into the codec's input buffer.
  size_t queued_size =
      VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  // NV12 destination layout: Y plane first, then interleaved UV — hence the
  // UV stride is twice the source U stride.
  uint8* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
                      frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
  // mention of that constant.
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

  // Real frame timestamps are not passed through; a synthetic, strictly
  // increasing timestamp (1us per frame) is used instead.
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  // Record how long the frame sat in |pending_frames_| (since Encode()).
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime",
                      base::Time::Now() - base::get<2>(input));
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}
377 bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
378 // If this returns false ever, then the VEA::Client interface will need to
379 // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
380 // prepared to field multiple requests to RequireBitstreamBuffers().
381 int count = media_codec_->GetOutputBuffersCount();
382 size_t capacity = media_codec_->GetOutputBuffersCapacity();
383 bool ret = count <= num_output_buffers_ &&
384 capacity <= output_buffers_capacity_;
385 LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
386 << num_output_buffers_ << "x" << output_buffers_capacity_
387 << ", now: " << count << "x" << capacity;
388 UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
389 return ret;
// Drains at most one encoded frame from the codec into a client-provided
// bitstream buffer, then posts BitstreamBufferReady() to the client.  No-op
// when the client is gone, no client buffer is available, or nothing is in
// flight at the codec.
void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_->GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32 buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
  // Loop until a real output buffer index is produced; FORMAT/BUFFERS_CHANGED
  // statuses leave |buf_index| negative and are retried after validation.
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        // Nothing ready yet; the polling timer will try again.
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        // Fatal only if the new geometry exceeds what the client allocated.
        RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
                          "Bitstream now requires more/larger buffers",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // Copy the encoded bytes into the client's shared-memory buffer.
  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), false));
  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to map SHM",
                    kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->mapped_size(),
                    "Encoded buffer too large: " << size << ">"
                                                 << shm->mapped_size(),
                    kPlatformFailureError);

  media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
  // false: do not render the buffer to a surface; we've already copied it.
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_->GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}
457 } // namespace content