Apply _RELATIVE relocations ahead of others.
[chromium-blink-merge.git] / content / common / gpu / media / android_video_encode_accelerator.cc
blob9b703d70f975c0734951d596bab1f933e605e2e3
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/android_video_encode_accelerator.h"
7 #include <set>
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/logging.h"
12 #include "base/message_loop/message_loop.h"
13 #include "base/metrics/histogram.h"
14 #include "content/common/gpu/gpu_channel.h"
15 #include "content/public/common/content_switches.h"
16 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
17 #include "media/base/android/media_codec_bridge.h"
18 #include "media/base/bitstream_buffer.h"
19 #include "media/base/limits.h"
20 #include "media/video/picture.h"
21 #include "third_party/libyuv/include/libyuv/convert_from.h"
22 #include "ui/gl/android/scoped_java_surface.h"
23 #include "ui/gl/gl_bindings.h"
25 using media::MediaCodecBridge;
26 using media::VideoCodecBridge;
27 using media::VideoFrame;
29 namespace content {
enum PixelFormat {
  // Subset of MediaCodecInfo.CodecCapabilities.
  // Values mirror Android's MediaCodecInfo.CodecCapabilities color-format
  // constants (presumably COLOR_FormatYUV420Planar / ...SemiPlanar — confirm
  // against the Android SDK if extending this list).
  COLOR_FORMAT_YUV420_PLANAR = 19,
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
// Helper macros for dealing with failure. If |result| evaluates false, emit
// |log| to DLOG(ERROR), register |error| with the client, and return.
//
// NOTE: the weak-pointer check doubles as a "has an error already been
// reported?" test: after the first NotifyError() the factory is reset, so
// subsequent failures are logged but not re-delivered to the client. The bare
// `return;` means this macro is only usable inside void member functions of
// AndroidVideoEncodeAccelerator that can see |client_ptr_factory_|.
#define RETURN_ON_FAILURE(result, log, error)                  \
  do {                                                         \
    if (!(result)) {                                           \
      DLOG(ERROR) << log;                                      \
      if (client_ptr_factory_->GetWeakPtr()) {                 \
        client_ptr_factory_->GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.reset();                           \
      }                                                        \
      return;                                                  \
    }                                                          \
  } while (0)
// Because MediaCodec is thread-hostile (must be poked on a single thread) and
// has no callback mechanism (b/11990118), we must drive it by polling for
// complete frames (and available input buffers, when the codec is fully
// saturated). This function defines the polling delay. The value used is an
// arbitrary choice that trades off CPU utilization (spinning) against latency.
// Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
static inline const base::TimeDelta EncodePollDelay() {
  // An alternative to this polling scheme could be to dedicate a new thread
  // (instead of using the ChildThread) to run the MediaCodec, and make that
  // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
  // believes the codec should complete "soon" (e.g. waiting for an input
  // buffer, or waiting for a picture when it knows enough complete input
  // pictures have been fed to saturate any internal buffering). This is
  // speculative and it's unclear that this would be a win (nor that there's a
  // reasonably device-agnostic way to fill in the "believes" above).
  return base::TimeDelta::FromMilliseconds(10);
}
// Zero timeout for the non-blocking Dequeue{Input,Output}Buffer() calls made
// from QueueInput()/DequeueOutput(): never block the calling thread.
static inline const base::TimeDelta NoWaitTimeOut() {
  return base::TimeDelta::FromMicroseconds(0);
}
73 static bool GetSupportedColorFormatForMime(const std::string& mime,
74 PixelFormat* pixel_format) {
75 std::set<int> formats = MediaCodecBridge::GetEncoderColorFormats(mime);
76 if (formats.count(COLOR_FORMAT_YUV420_SEMIPLANAR) > 0)
77 *pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
78 else if (formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0)
79 *pixel_format = COLOR_FORMAT_YUV420_PLANAR;
80 else
81 return false;
83 return true;
// Members start in the "no codec yet" state; real setup happens in
// Initialize(). num_output_buffers_ is -1 until the codec reports its count.
AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator()
    : num_buffers_at_codec_(0),
      num_output_buffers_(-1),
      output_buffers_capacity_(0),
      last_set_bitrate_(0) {}
AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  // Destruction (via Destroy()'s `delete this`) must happen on the thread the
  // instance was created on.
  DCHECK(thread_checker_.CalledOnValidThread());
}
96 std::vector<media::VideoEncodeAccelerator::SupportedProfile>
97 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
98 std::vector<MediaCodecBridge::CodecsInfo> codecs_info =
99 MediaCodecBridge::GetCodecsInfo();
101 std::vector<SupportedProfile> profiles;
103 #if defined(ENABLE_WEBRTC)
104 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
105 if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
106 return profiles;
107 #endif
109 for (size_t i = 0; i < codecs_info.size(); ++i) {
110 const MediaCodecBridge::CodecsInfo& info = codecs_info[i];
111 if (info.direction != media::MEDIA_CODEC_ENCODER || info.codecs != "vp8" ||
112 VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8,
113 media::MEDIA_CODEC_ENCODER)) {
114 // We're only looking for a HW VP8 encoder.
115 continue;
117 SupportedProfile profile;
118 profile.profile = media::VP8PROFILE_ANY;
119 // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
120 // encoder? Sure would be. Too bad it doesn't. So we hard-code some
121 // reasonable defaults.
122 profile.max_resolution.SetSize(1920, 1088);
123 profile.max_framerate_numerator = 30;
124 profile.max_framerate_denominator = 1;
125 profiles.push_back(profile);
127 return profiles;
130 bool AndroidVideoEncodeAccelerator::Initialize(
131 VideoFrame::Format format,
132 const gfx::Size& input_visible_size,
133 media::VideoCodecProfile output_profile,
134 uint32 initial_bitrate,
135 Client* client) {
136 DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
137 << ", input_visible_size: " << input_visible_size.ToString()
138 << ", output_profile: " << output_profile
139 << ", initial_bitrate: " << initial_bitrate;
140 DCHECK(!media_codec_);
141 DCHECK(thread_checker_.CalledOnValidThread());
143 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
145 if (!(media::MediaCodecBridge::SupportsSetParameters() &&
146 format == VideoFrame::I420 &&
147 output_profile == media::VP8PROFILE_ANY)) {
148 DLOG(ERROR) << "Unexpected combo: " << format << ", " << output_profile;
149 return false;
152 last_set_bitrate_ = initial_bitrate;
154 // Only consider using MediaCodec if it's likely backed by hardware.
155 if (media::VideoCodecBridge::IsKnownUnaccelerated(
156 media::kCodecVP8, media::MEDIA_CODEC_ENCODER)) {
157 DLOG(ERROR) << "No HW support";
158 return false;
161 PixelFormat pixel_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
162 if (!GetSupportedColorFormatForMime("video/x-vnd.on2.vp8", &pixel_format)) {
163 DLOG(ERROR) << "No color format support.";
164 return false;
166 media_codec_.reset(media::VideoCodecBridge::CreateEncoder(media::kCodecVP8,
167 input_visible_size,
168 initial_bitrate,
169 INITIAL_FRAMERATE,
170 IFRAME_INTERVAL,
171 pixel_format));
173 if (!media_codec_) {
174 DLOG(ERROR) << "Failed to create/start the codec: "
175 << input_visible_size.ToString();
176 return false;
179 num_output_buffers_ = media_codec_->GetOutputBuffersCount();
180 output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
181 base::MessageLoop::current()->PostTask(
182 FROM_HERE,
183 base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
184 client_ptr_factory_->GetWeakPtr(),
185 num_output_buffers_,
186 input_visible_size,
187 output_buffers_capacity_));
188 return true;
191 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
192 if (!io_timer_.IsRunning() &&
193 (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
194 io_timer_.Start(FROM_HERE,
195 EncodePollDelay(),
196 this,
197 &AndroidVideoEncodeAccelerator::DoIOTask);
201 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
202 if (io_timer_.IsRunning() &&
203 (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
204 io_timer_.Stop();
208 void AndroidVideoEncodeAccelerator::Encode(
209 const scoped_refptr<VideoFrame>& frame,
210 bool force_keyframe) {
211 DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
212 DCHECK(thread_checker_.CalledOnValidThread());
213 RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
214 "Unexpected format",
215 kInvalidArgumentError);
217 // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
218 // we insist on being called with packed frames and no cropping :(
219 RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
220 frame->stride(VideoFrame::kYPlane) &&
221 frame->row_bytes(VideoFrame::kUPlane) ==
222 frame->stride(VideoFrame::kUPlane) &&
223 frame->row_bytes(VideoFrame::kVPlane) ==
224 frame->stride(VideoFrame::kVPlane) &&
225 frame->coded_size() == frame->visible_rect().size(),
226 "Non-packed frame, or visible_rect != coded_size",
227 kInvalidArgumentError);
229 pending_frames_.push(MakeTuple(frame, force_keyframe, base::Time::Now()));
230 DoIOTask();
233 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
234 const media::BitstreamBuffer& buffer) {
235 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
236 DCHECK(thread_checker_.CalledOnValidThread());
237 RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
238 "Output buffers too small!",
239 kInvalidArgumentError);
240 available_bitstream_buffers_.push_back(buffer);
241 DoIOTask();
244 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
245 uint32 bitrate,
246 uint32 framerate) {
247 DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
248 << ", framerate: " << framerate;
249 DCHECK(thread_checker_.CalledOnValidThread());
250 if (bitrate != last_set_bitrate_) {
251 last_set_bitrate_ = bitrate;
252 media_codec_->SetVideoBitrate(bitrate);
254 // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
255 // framerate, so we ignore that here. This is OK because Android only uses
256 // the framerate value from MediaFormat during configure() as a proxy for
257 // bitrate, and we set that explicitly.
void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Drop the client first so no further notifications can be delivered while
  // tearing down.
  client_ptr_factory_.reset();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  // Per the VideoEncodeAccelerator contract, Destroy() deletes the instance.
  delete this;
}
// One pump of the poll loop: push pending input into the codec, pull any
// finished output out, then reconcile the timer with the remaining work.
void AndroidVideoEncodeAccelerator::DoIOTask() {
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}
// Feeds at most one pending frame to the codec: dequeues a codec input
// buffer, converts the I420 frame into the codec's semiplanar (NV12) layout
// in place, and queues it. No-op when there is no client, no pending frame,
// or no free input buffer.
void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_->GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    // No input buffer free right now; the poll timer will retry.
    return;
  }

  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = input.b;
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = input.a;

  uint8* buffer = NULL;
  size_t capacity = 0;
  media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);

  size_t queued_size =
      VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  // Destination layout: full-size Y plane followed immediately by the
  // interleaved UV plane, i.e. COLOR_FORMAT_YUV420_SEMIPLANAR.
  uint8* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
                               frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
  // mention of that constant. (libyuv returns 0 on success, hence the `!`.)
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

  // Synthetic strictly-increasing timestamp (1us per frame) — presumably to
  // keep MediaCodec's presentation timestamps monotonic; real timestamps are
  // not carried through the codec here.
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  // input.c is the Encode()-time enqueue timestamp; report queueing latency.
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", base::Time::Now() - input.c);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}
348 bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
349 // If this returns false ever, then the VEA::Client interface will need to
350 // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
351 // prepared to field multiple requests to RequireBitstreamBuffers().
352 int count = media_codec_->GetOutputBuffersCount();
353 size_t capacity = media_codec_->GetOutputBuffersCapacity();
354 bool ret = media_codec_->GetOutputBuffers() && count <= num_output_buffers_ &&
355 capacity <= output_buffers_capacity_;
356 LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
357 << num_output_buffers_ << "x" << output_buffers_capacity_
358 << ", now: " << count << "x" << capacity;
359 UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
360 return ret;
// Pulls at most one completed output buffer from the codec, copies it into a
// client-provided bitstream buffer (via shared memory), releases the codec
// buffer, and notifies the client asynchronously. No-op when there is no
// client, no registered bitstream buffer, or nothing inside the codec.
void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_->GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32 buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
  // Loop past FORMAT/BUFFERS_CHANGED notifications until a real buffer
  // (buf_index >= 0) arrives or the codec asks us to try again later.
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        // Tolerable only if the already-registered client buffers still
        // suffice; otherwise this is fatal (see DoOutputBuffersSuffice()).
        RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
                          "Bitstream now requires more/larger buffers",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  // Map the client's shared memory (read/write) to receive the encoded bytes.
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), false));
  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to map SHM",
                    kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->mapped_size(),
                    "Encoded buffer too large: " << size << ">"
                                                 << shm->mapped_size(),
                    kPlatformFailureError);

  media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
  // Return the buffer to the codec (false: do not render to a surface).
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_->GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}
428 } // namespace content