[Android WebView] Fix webview perf bot switchover to use org.chromium.webview_shell...
[chromium-blink-merge.git] / content / common / gpu / media / android_video_decode_accelerator.cc
blob06d59863d049cacd6c03b22a0ebbf0e908d9755e
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/common/gpu/media/android_video_decode_accelerator.h"
7 #include "base/bind.h"
8 #include "base/logging.h"
9 #include "base/message_loop/message_loop.h"
10 #include "base/metrics/histogram.h"
11 #include "content/common/gpu/gpu_channel.h"
12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
13 #include "media/base/bitstream_buffer.h"
14 #include "media/base/limits.h"
15 #include "media/video/picture.h"
16 #include "ui/gl/android/scoped_java_surface.h"
17 #include "ui/gl/android/surface_texture.h"
18 #include "ui/gl/gl_bindings.h"
20 namespace content {
// Helper macro for dealing with failure. If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder, and return from the
// calling function. The error is reported via a posted task (rather than a
// synchronous call) so the client is not re-entered from whatever stack
// discovered the failure; |state_| is set to ERROR immediately so later
// DoIOTask() invocations become no-ops.
#define RETURN_ON_FAILURE(result, log, error)                     \
  do {                                                            \
    if (!(result)) {                                              \
      DLOG(ERROR) << log;                                         \
      base::MessageLoop::current()->PostTask(                     \
          FROM_HERE,                                              \
          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
                     weak_this_factory_.GetWeakPtr(),             \
                     error));                                     \
      state_ = ERROR;                                             \
      return;                                                     \
    }                                                             \
  } while (0)
// Number of picture buffers requested from the client.
// TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
// phase, but 1 is added due to crbug.com/176036. This should be tuned when we
// have actual use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// Max number of bitstreams notified to the client with
// NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
// Used by QueueInput() to throttle how far ahead of decode we ack inputs.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };
47 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
48 // has no callback mechanism (b/11990118), we must drive it by polling for
49 // complete frames (and available input buffers, when the codec is fully
50 // saturated). This function defines the polling delay. The value used is an
51 // arbitrary choice that trades off CPU utilization (spinning) against latency.
52 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
53 static inline const base::TimeDelta DecodePollDelay() {
54 // An alternative to this polling scheme could be to dedicate a new thread
55 // (instead of using the ChildThread) to run the MediaCodec, and make that
56 // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
57 // believes the codec should complete "soon" (e.g. waiting for an input
58 // buffer, or waiting for a picture when it knows enough complete input
59 // pictures have been fed to saturate any internal buffering). This is
60 // speculative and it's unclear that this would be a win (nor that there's a
61 // reasonably device-agnostic way to fill in the "believes" above).
62 return base::TimeDelta::FromMilliseconds(10);
65 static inline const base::TimeDelta NoWaitTimeOut() {
66 return base::TimeDelta::FromMicroseconds(0);
// Constructs an inert decoder; no codec or GL resources are created until
// Initialize(). |decoder| is the command-buffer GLES2 decoder used for the
// texture copy; |make_context_current| makes that decoder's context current.
AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
    const base::Callback<bool(void)>& make_context_current)
    : client_(NULL),
      make_context_current_(make_context_current),
      codec_(media::kCodecH264),  // Placeholder; the real codec is chosen in
                                  // Initialize() from the requested profile.
      state_(NO_ERROR),
      surface_texture_id_(0),
      picturebuffers_requested_(false),
      gl_decoder_(decoder),
      weak_this_factory_(this) {}
// Destruction happens via Destroy()'s "delete this"; only sanity-check the
// thread here — actual teardown (codec stop, GL cleanup) lives in Destroy().
AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
// Sets up the decoder for |profile|: picks the codec, creates the external
// GL texture + SurfaceTexture the codec renders into, and configures the
// MediaCodec. Returns false (without reporting an error) on any failure so
// the caller can fall back to another decoder.
bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                               Client* client) {
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ = client;

  if (profile == media::VP8PROFILE_ANY) {
    codec_ = media::kCodecVP8;
  } else {
    // TODO(dwkang): enable H264 once b/8125974 is fixed.
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec_, media::MEDIA_CODEC_DECODER)) {
    return false;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  if (!gl_decoder_) {
    LOG(ERROR) << "Failed to get gles2 decoder instance.";
    return false;
  }

  // Create the external texture that the codec's SurfaceTexture will render
  // decoded frames into; frames are later copied from it to client textures.
  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  // Restore the command-buffer decoder's GL state that we just clobbered.
  gl_decoder_->RestoreTextureUnitBindings(0);
  gl_decoder_->RestoreActiveTexture();

  surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);

  if (!ConfigureMediaCodec()) {
    LOG(ERROR) << "Failed to create MediaCodec instance.";
    return false;
  }

  return true;
}
138 void AndroidVideoDecodeAccelerator::DoIOTask() {
139 DCHECK(thread_checker_.CalledOnValidThread());
140 if (state_ == ERROR) {
141 return;
144 QueueInput();
145 DequeueOutput();
// Feeds at most one pending bitstream buffer into the codec. Returns early
// (to be retried on the next poll) when too many inputs have already been
// acked to the client, when nothing is pending, or when the codec has no
// free input buffer.
void AndroidVideoDecodeAccelerator::QueueInput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
    return;
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
      NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    return;
  }

  // Record how long the buffer sat in our queue before the codec took it.
  base::Time queued_time = pending_bitstream_buffers_.front().second;
  UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
                      base::Time::Now() - queued_time);
  media::BitstreamBuffer bitstream_buffer =
      pending_bitstream_buffers_.front().first;
  pending_bitstream_buffers_.pop();

  // Buffer id -1 is the Flush() sentinel; translate it to a codec EOS.
  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    return;
  }

  // Abuse the presentation time argument to propagate the bitstream
  // buffer ID to the output, so we can report it back to the client in
  // PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), true));

  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to SharedMemory::Map()",
                    UNREADABLE_INPUT);

  status =
      media_codec_->QueueInputBuffer(input_buf_index,
                                     static_cast<const uint8*>(shm->memory()),
                                     bitstream_buffer.size(),
                                     timestamp);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    PLATFORM_FAILURE);

  // We should call NotifyEndOfBitstreamBuffer() when no more decoded output
  // will be returned from the bitstream buffer. However, the MediaCodec API
  // is not enough to guarantee it.
  // So here we call NotifyEndOfBitstreamBuffer() in advance in order to
  // keep getting more bitstreams from the client, and throttle them by using
  // |bitstreams_notified_in_advance_|.
  // TODO(dwkang): check if there is a way to remove this workaround.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
                 weak_this_factory_.GetWeakPtr(),
                 bitstream_buffer.id()));
  bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
}
// Drains one frame of output from the codec, handling format changes and
// EOS along the way. A decoded frame is rendered to the SurfaceTexture and
// then copied to a client PictureBuffer via SendCurrentSurfaceToClient().
void AndroidVideoDecodeAccelerator::DequeueOutput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Picture buffers have been requested but not yet provided by the client.
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // Don't have any picture buffer to send. Need to wait more.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
  // Loop until we dequeue an actual output buffer (buf_index >= 0); status
  // codes like OUTPUT_BUFFERS_CHANGED leave buf_index negative and retry.
  do {
    size_t offset = 0;
    size_t size = 0;

    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
      case media::MEDIA_CODEC_ERROR:
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
        int32 width, height;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          // First format signal: remember the coded size and ask the client
          // for picture buffers.
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          base::MessageLoop::current()->PostTask(
              FROM_HERE,
              base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
                         weak_this_factory_.GetWeakPtr()));
        } else {
          // Dynamic resolution change support is not specified by the Android
          // platform at and before JB-MR1, so it's not possible to smoothly
          // continue playback at this point. Instead, error out immediately,
          // expecting clients to Reset() as appropriate to avoid this.
          // b/7093648
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // This ignores the emitted ByteBuffer and instead relies on rendering to the
  // codec's SurfaceTexture and then copying from that texture to the client's
  // PictureBuffer's texture. This means that each picture's data is written
  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
  // to the client's texture. It would be nicer to either:
  // 1) Render directly to the client's texture from MediaCodec (one write); or
  // 2) Upload the ByteBuffer to the client's texture (two writes).
  // Unfortunately neither is possible:
  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
  //    written to can't change during the codec's lifetime. b/11990461
  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
  //    opaque/non-standard format. It's not possible to negotiate the decoder
  //    to emit a specific colorspace, even using HW CSC. b/10706245
  // So, we live with these two extra copies per picture :(
  media_codec_->ReleaseOutputBuffer(buf_index, true);

  if (eos) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
                   weak_this_factory_.GetWeakPtr()));
  } else {
    // The timestamp carries the bitstream buffer id (see QueueInput()).
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));

    // Removes ids earlier than or equal to the dequeued id from
    // |bitstreams_notified_in_advance_|. Note that the list does not exactly
    // mirror the ids still inside the decoder because of frame reordering;
    // we just maintain it roughly and use it for throttling purposes.
    std::list<int32>::iterator it;
    for (it = bitstreams_notified_in_advance_.begin();
         it != bitstreams_notified_in_advance_.end();
         ++it) {
      if (*it == bitstream_buffer_id) {
        bitstreams_notified_in_advance_.erase(
            bitstreams_notified_in_advance_.begin(), ++it);
        break;
      }
    }
  }
}
316 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
317 int32 bitstream_id) {
318 DCHECK(thread_checker_.CalledOnValidThread());
319 DCHECK_NE(bitstream_id, -1);
320 DCHECK(!free_picture_ids_.empty());
322 RETURN_ON_FAILURE(make_context_current_.Run(),
323 "Failed to make this decoder's GL context current.",
324 PLATFORM_FAILURE);
326 int32 picture_buffer_id = free_picture_ids_.front();
327 free_picture_ids_.pop();
329 float transfrom_matrix[16];
330 surface_texture_->UpdateTexImage();
331 surface_texture_->GetTransformMatrix(transfrom_matrix);
333 OutputBufferMap::const_iterator i =
334 output_picture_buffers_.find(picture_buffer_id);
335 RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
336 "Can't find a PictureBuffer for " << picture_buffer_id,
337 PLATFORM_FAILURE);
338 uint32 picture_buffer_texture_id = i->second.texture_id();
340 RETURN_ON_FAILURE(gl_decoder_.get(),
341 "Failed to get gles2 decoder instance.",
342 ILLEGAL_STATE);
343 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
344 // needed because it takes 10s of milliseconds to initialize.
345 if (!copier_) {
346 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
347 copier_->Initialize(gl_decoder_.get());
350 // Here, we copy |surface_texture_id_| to the picture buffer instead of
351 // setting new texture to |surface_texture_| by calling attachToGLContext()
352 // because:
353 // 1. Once we call detachFrameGLContext(), it deletes the texture previous
354 // attached.
355 // 2. SurfaceTexture requires us to apply a transform matrix when we show
356 // the texture.
357 // TODO(hkuang): get the StreamTexture transform matrix in GPU process
358 // instead of using default matrix crbug.com/226218.
359 const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
360 0.0f, 1.0f, 0.0f, 0.0f,
361 0.0f, 0.0f, 1.0f, 0.0f,
362 0.0f, 0.0f, 0.0f, 1.0f};
363 copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
364 GL_TEXTURE_EXTERNAL_OES,
365 surface_texture_id_,
366 picture_buffer_texture_id,
367 size_.width(),
368 size_.height(),
369 false,
370 false,
371 false,
372 default_matrix);
374 base::MessageLoop::current()->PostTask(
375 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
376 weak_this_factory_.GetWeakPtr(),
377 media::Picture(picture_buffer_id, bitstream_id,
378 gfx::Rect(size_), false)));
381 void AndroidVideoDecodeAccelerator::Decode(
382 const media::BitstreamBuffer& bitstream_buffer) {
383 DCHECK(thread_checker_.CalledOnValidThread());
384 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
385 base::MessageLoop::current()->PostTask(
386 FROM_HERE,
387 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
388 weak_this_factory_.GetWeakPtr(),
389 bitstream_buffer.id()));
390 return;
393 pending_bitstream_buffers_.push(
394 std::make_pair(bitstream_buffer, base::Time::Now()));
396 DoIOTask();
// Receives the picture buffers the client allocated in response to
// RequestPictureBuffers(). Validates their size and count, records them,
// and resumes the decode pump.
void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(output_picture_buffers_.empty());
  DCHECK(free_picture_ids_.empty());

  for (size_t i = 0; i < buffers.size(); ++i) {
    RETURN_ON_FAILURE(buffers[i].size() == size_,
                      "Invalid picture buffer size was passed.",
                      INVALID_ARGUMENT);
    int32 id = buffers[i].id();
    output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
    free_picture_ids_.push(id);
    // Since the client might be re-using |picture_buffer_id| values, forget
    // about previously-dismissed IDs now. See ReusePictureBuffer() comment
    // about "zombies" for why we maintain this set in the first place.
    dismissed_picture_ids_.erase(id);
  }

  RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
                    "Invalid picture buffers were passed.",
                    INVALID_ARGUMENT);

  DoIOTask();
}
425 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
426 int32 picture_buffer_id) {
427 DCHECK(thread_checker_.CalledOnValidThread());
429 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
430 // IPC, or in a PostTask either at the sender or receiver) when we sent a
431 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such
432 // potential "zombie" IDs here.
433 if (dismissed_picture_ids_.erase(picture_buffer_id))
434 return;
436 free_picture_ids_.push(picture_buffer_id);
438 DoIOTask();
// Flushes the decoder by queuing the EOS sentinel (a BitstreamBuffer with
// id -1); NotifyFlushDone() is posted when DequeueOutput() sees the EOS.
void AndroidVideoDecodeAccelerator::Flush() {
  DCHECK(thread_checker_.CalledOnValidThread());

  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}
// (Re)creates the MediaCodec decoder rendering into |surface_texture_| and
// starts the polling timer that drives DoIOTask(). Returns false if codec
// creation fails (e.g. no suitable decoder on the device).
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(surface_texture_.get());

  gfx::ScopedJavaSurface surface(surface_texture_.get());

  // Pass a dummy 320x240 canvas size and let the codec signal the real size
  // when it's known from the bitstream.
  media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
      codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
  if (!media_codec_)
    return false;

  io_timer_.Start(FROM_HERE,
                  DecodePollDelay(),
                  this,
                  &AndroidVideoDecodeAccelerator::DoIOTask);
  return true;
}
467 void AndroidVideoDecodeAccelerator::Reset() {
468 DCHECK(thread_checker_.CalledOnValidThread());
470 while (!pending_bitstream_buffers_.empty()) {
471 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
472 pending_bitstream_buffers_.pop();
474 if (bitstream_buffer_id != -1) {
475 base::MessageLoop::current()->PostTask(
476 FROM_HERE,
477 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
478 weak_this_factory_.GetWeakPtr(),
479 bitstream_buffer_id));
482 bitstreams_notified_in_advance_.clear();
484 for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
485 it != output_picture_buffers_.end();
486 ++it) {
487 client_->DismissPictureBuffer(it->first);
488 dismissed_picture_ids_.insert(it->first);
490 output_picture_buffers_.clear();
491 std::queue<int32> empty;
492 std::swap(free_picture_ids_, empty);
493 CHECK(free_picture_ids_.empty());
494 picturebuffers_requested_ = false;
496 // On some devices, and up to at least JB-MR1,
497 // - flush() can fail after EOS (b/8125974); and
498 // - mid-stream resolution change is unsupported (b/7093648).
499 // To cope with these facts, we always stop & restart the codec on Reset().
500 io_timer_.Stop();
501 media_codec_->Stop();
502 ConfigureMediaCodec();
503 state_ = NO_ERROR;
505 base::MessageLoop::current()->PostTask(
506 FROM_HERE,
507 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
508 weak_this_factory_.GetWeakPtr()));
// Tears everything down and deletes |this|. Weak pointers are invalidated
// first so that any already-posted notification tasks become no-ops.
void AndroidVideoDecodeAccelerator::Destroy() {
  DCHECK(thread_checker_.CalledOnValidThread());

  weak_this_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    io_timer_.Stop();
    media_codec_->Stop();
  }
  // Only delete the texture if Initialize() actually created one.
  if (surface_texture_id_)
    glDeleteTextures(1, &surface_texture_id_);
  if (copier_)
    copier_->Destroy();
  delete this;
}
// This decoder must be driven on the thread it was created on (MediaCodec
// is thread-hostile), so IO-thread decoding is not supported.
bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}
// Asks the client (via posted task from DequeueOutput()) to allocate
// kNumPictureBuffers GL_TEXTURE_2D-backed picture buffers of size |size_|.
void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}
// Trampoline run via PostTask so the client is never re-entered from within
// a decode step; forwards a completed picture to the client.
void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}
// Trampoline run via PostTask; tells the client it may reuse the bitstream
// buffer identified by |input_buffer_id|.
void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}
// Trampoline run via PostTask once DequeueOutput() observes the codec EOS.
void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}
// Trampoline run via PostTask at the end of Reset().
void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}
// Trampoline run via PostTask from RETURN_ON_FAILURE; reports a fatal
// decoder error to the client.
void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}
557 // static
558 media::VideoDecodeAccelerator::SupportedProfiles
559 AndroidVideoDecodeAccelerator::GetSupportedProfiles() {
560 SupportedProfiles profiles;
561 if (media::VideoCodecBridge::IsKnownUnaccelerated(
562 media::kCodecVP8, media::MEDIA_CODEC_DECODER)) {
563 return profiles;
565 SupportedProfile profile;
566 profile.profile = media::VP8PROFILE_ANY;
567 profile.min_resolution.SetSize(16, 16);
568 profile.max_resolution.SetSize(1920, 1088);
569 profiles.push_back(profile);
570 return profiles;
573 } // namespace content