// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_decode_accelerator.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "content/common/gpu/gpu_channel.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/android/surface_texture.h"
#include "ui/gl/gl_bindings.h"

namespace content {

// Helper macro for dealing with failure.  If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder, and return.
#define RETURN_ON_FAILURE(result, log, error)                       \
  do {                                                              \
    if (!(result)) {                                                \
      DLOG(ERROR) << log;                                           \
      base::MessageLoop::current()->PostTask(                       \
          FROM_HERE,                                                \
          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError,   \
                     weak_this_factory_.GetWeakPtr(), error));      \
      state_ = ERROR;                                               \
      return;                                                       \
    }                                                               \
  } while (0)

// TODO(dwkang): We only need kMaxVideoFrames to get through the media stack's
// prerolling phase, but 1 is added due to crbug.com/176036. This should be
// tuned once we have a real use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// Max number of bitstreams notified to the client with
// NotifyEndOfBitstreamBuffer() before any output is produced from them.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };

// Because MediaCodec is thread-hostile (must be poked on a single thread) and
// has no callback mechanism (b/11990118), we must drive it by polling for
// complete frames (and available input buffers, when the codec is fully
// saturated).  This function defines the polling delay.  The value used is an
// arbitrary choice that trades off CPU utilization (spinning) against latency.
// Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
static inline const base::TimeDelta DecodePollDelay() {
  // An alternative to this polling scheme could be to dedicate a new thread
  // (instead of using the ChildThread) to run the MediaCodec, and make that
  // thread use the timeout-based flavor of MediaCodec's dequeue methods when
  // it believes the codec should complete "soon" (e.g. waiting for an input
  // buffer, or waiting for a picture when it knows enough complete input
  // pictures have been fed to saturate any internal buffering).  This is
  // speculative and it's unclear that this would be a win (nor that there's a
  // reasonably device-agnostic way to fill in the "believes" above).
  return base::TimeDelta::FromMilliseconds(10);
}

static inline const base::TimeDelta NoWaitTimeOut() {
  return base::TimeDelta::FromMicroseconds(0);
}

AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
    const base::Callback<bool(void)>& make_context_current)
    : client_(NULL),
      make_context_current_(make_context_current),
      codec_(media::kCodecH264),
      state_(NO_ERROR),
      surface_texture_id_(0),
      picturebuffers_requested_(false),
      gl_decoder_(decoder),
      weak_this_factory_(this) {}

AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                               Client* client) {
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ = client;

  if (profile == media::VP8PROFILE_ANY) {
    codec_ = media::kCodecVP8;
  } else {
    // TODO(dwkang): enable H264 once b/8125974 is fixed.
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec_, media::MEDIA_CODEC_DECODER)) {
    return false;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  if (!gl_decoder_) {
    LOG(ERROR) << "Failed to get gles2 decoder instance.";
    return false;
  }

  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  gl_decoder_->RestoreTextureUnitBindings(0);
  gl_decoder_->RestoreActiveTexture();

  surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);

  if (!ConfigureMediaCodec()) {
    LOG(ERROR) << "Failed to create MediaCodec instance.";
    return false;
  }

  return true;
}

void AndroidVideoDecodeAccelerator::DoIOTask() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (state_ == ERROR) {
    return;
  }

  QueueInput();
  DequeueOutput();
}

void AndroidVideoDecodeAccelerator::QueueInput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
    return;
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
      NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    return;
  }

  base::Time queued_time = pending_bitstream_buffers_.front().second;
  UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
                      base::Time::Now() - queued_time);
  media::BitstreamBuffer bitstream_buffer =
      pending_bitstream_buffers_.front().first;
  pending_bitstream_buffers_.pop();

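  // A bitstream id of -1 is the EOS sentinel queued by Flush(); forward it to
  // MediaCodec as an end-of-stream marker rather than as data.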
  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    return;
  }

  // Abuse the presentation time argument to propagate the bitstream buffer ID
  // to the output, so we can report it back to the client in PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), true));

  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to SharedMemory::Map()",
                    UNREADABLE_INPUT);

  status =
      media_codec_->QueueInputBuffer(input_buf_index,
                                     static_cast<const uint8*>(shm->memory()),
                                     bitstream_buffer.size(),
                                     timestamp);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    PLATFORM_FAILURE);

  // Ideally we would call NotifyEndOfBitstreamBuffer() only once no more
  // decoded output can come from this bitstream buffer, but the MediaCodec
  // API gives us no way to know that.  So we notify the client in advance to
  // keep more bitstreams flowing, and throttle the notifications via
  // |bitstreams_notified_in_advance_|.
  // TODO(dwkang): check if there is a way to remove this workaround.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
                 weak_this_factory_.GetWeakPtr(),
                 bitstream_buffer.id()));
  bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
}

void AndroidVideoDecodeAccelerator::DequeueOutput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // No free picture buffer to send the next frame into; wait until the
    // client returns one.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
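  // Keep dequeuing until MediaCodec hands back a real output buffer index;
  // format and output-buffer changes are handled inside the switch and leave
  // |buf_index| negative, so the loop retries.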
  do {
    size_t offset = 0;
    size_t size = 0;

    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
      case media::MEDIA_CODEC_ERROR:
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
        int width = 0;
        int height = 0;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          base::MessageLoop::current()->PostTask(
              FROM_HERE,
              base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
                         weak_this_factory_.GetWeakPtr()));
        } else {
          // Dynamic resolution change support is not specified by the Android
          // platform at and before JB-MR1, so it's not possible to smoothly
          // continue playback at this point.  Instead, error out immediately,
          // expecting clients to Reset() as appropriate to avoid this.
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        RETURN_ON_FAILURE(media_codec_->GetOutputBuffers(),
                          "Cannot get output buffers from MediaCodec.",
                          PLATFORM_FAILURE);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // This ignores the emitted ByteBuffer and instead relies on rendering to the
  // codec's SurfaceTexture and then copying from that texture to the client's
  // PictureBuffer's texture.  This means that each picture's data is written
  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
  // to the client's texture.  It would be nicer to either:
  // 1) Render directly to the client's texture from MediaCodec (one write); or
  // 2) Upload the ByteBuffer to the client's texture (two writes).
  // Unfortunately neither is possible:
  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
  //    written to can't change during the codec's lifetime.  b/11990461
  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
  //    opaque/non-standard format.  It's not possible to negotiate the decoder
  //    to emit a specific colorspace, even using HW CSC.  b/10706245
  // So, we live with these two extra copies per picture :(
  media_codec_->ReleaseOutputBuffer(buf_index, true);

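  // An EOS here means the Flush() sentinel has drained through the codec;
  // otherwise the timestamp carries the originating bitstream id (see
  // QueueInput()).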
  if (eos) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
                   weak_this_factory_.GetWeakPtr()));
  } else {
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));

    // Remove every id up to and including |bitstream_buffer_id| from the
    // throttling list.  Because of frame reordering,
    // |bitstreams_notified_in_advance_| does not track exactly which
    // bitstreams are still inside the decoder; we only maintain it roughly,
    // for throttling purposes.
    std::list<int32>::iterator it;
    for (it = bitstreams_notified_in_advance_.begin();
         it != bitstreams_notified_in_advance_.end();
         ++it) {
      if (*it == bitstream_buffer_id) {
        bitstreams_notified_in_advance_.erase(
            bitstreams_notified_in_advance_.begin(), ++it);
        break;
      }
    }
  }
}

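// Copies the frame most recently latched into |surface_texture_| into the
// texture of the next free PictureBuffer and posts a PictureReady()
// notification for it to the client.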
void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
    int32 bitstream_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_NE(bitstream_id, -1);
  DCHECK(!free_picture_ids_.empty());

  RETURN_ON_FAILURE(make_context_current_.Run(),
                    "Failed to make this decoder's GL context current.",
                    PLATFORM_FAILURE);

  int32 picture_buffer_id = free_picture_ids_.front();
  free_picture_ids_.pop();

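  // Latch the most recently decoded frame into |surface_texture_|.  Its
  // transform matrix is fetched but not yet applied; see the TODO about
  // crbug.com/226218 below.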
  float transform_matrix[16];
  surface_texture_->UpdateTexImage();
  surface_texture_->GetTransformMatrix(transform_matrix);

  OutputBufferMap::const_iterator i =
      output_picture_buffers_.find(picture_buffer_id);
  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
                    "Can't find a PictureBuffer for " << picture_buffer_id,
                    PLATFORM_FAILURE);
  uint32 picture_buffer_texture_id = i->second.texture_id();

  RETURN_ON_FAILURE(gl_decoder_.get(),
                    "Failed to get gles2 decoder instance.",
                    ILLEGAL_STATE);
  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
  // needed because it takes 10s of milliseconds to initialize.
  if (!copier_) {
    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
    copier_->Initialize(gl_decoder_.get());
  }

  // Here we copy |surface_texture_id_| into the picture buffer's texture
  // instead of attaching a new texture to |surface_texture_| via
  // attachToGLContext(), because:
  // 1. Once we call detachFromGLContext(), it deletes the texture previously
  //    attached.
  // 2. SurfaceTexture requires us to apply a transform matrix when we show
  //    the texture.
  // TODO(hkuang): get the StreamTexture transform matrix in the GPU process
  // instead of using the default matrix.  crbug.com/226218
  const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
                                             0.0f, 1.0f, 0.0f, 0.0f,
                                             0.0f, 0.0f, 1.0f, 0.0f,
                                             0.0f, 0.0f, 0.0f, 1.0f};
  copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
                                      GL_TEXTURE_EXTERNAL_OES,
                                      surface_texture_id_,
                                      picture_buffer_texture_id,
                                      0,
                                      size_.width(),
                                      size_.height(),
                                      false,
                                      false,
                                      false,
                                      default_matrix);

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyPictureReady,
          weak_this_factory_.GetWeakPtr(),
          media::Picture(picture_buffer_id, bitstream_id, gfx::Rect(size_))));
}

void AndroidVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(thread_checker_.CalledOnValidThread());
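  // An empty buffer that is not the EOS sentinel carries nothing to decode;
  // just hand it straight back to the client.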
  if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
                   weak_this_factory_.GetWeakPtr(),
                   bitstream_buffer.id()));
    return;
  }

  pending_bitstream_buffers_.push(
      std::make_pair(bitstream_buffer, base::Time::Now()));

  DoIOTask();
}

void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(output_picture_buffers_.empty());
  DCHECK(free_picture_ids_.empty());

  for (size_t i = 0; i < buffers.size(); ++i) {
    RETURN_ON_FAILURE(buffers[i].size() == size_,
                      "Invalid picture buffer size was passed.",
                      INVALID_ARGUMENT);
    int32 id = buffers[i].id();
    output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
    free_picture_ids_.push(id);
    // Since the client might be re-using |picture_buffer_id| values, forget
    // about previously-dismissed IDs now.  See ReusePictureBuffer() comment
    // about "zombies" for why we maintain this set in the first place.
    dismissed_picture_ids_.erase(id);
  }

  RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
                    "Invalid picture buffers were passed.",
                    INVALID_ARGUMENT);

  DoIOTask();
}

void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
  // IPC, or in a PostTask either at the sender or receiver) when we sent a
  // DismissPictureBuffer() for this |picture_buffer_id|.  Account for such
  // potential "zombie" IDs here.
  if (dismissed_picture_ids_.erase(picture_buffer_id))
    return;

  free_picture_ids_.push(picture_buffer_id);

  DoIOTask();
}

void AndroidVideoDecodeAccelerator::Flush() {
  DCHECK(thread_checker_.CalledOnValidThread());

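  // Flush is implemented by queuing the EOS sentinel (bitstream id -1);
  // NotifyFlushDone() is posted once that EOS emerges from DequeueOutput().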
  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}

bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(surface_texture_.get());

  gfx::ScopedJavaSurface surface(surface_texture_.get());

  // Pass a dummy 320x240 canvas size and let the codec signal the real size
  // when it's known from the bitstream.
  media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
      codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
  if (!media_codec_)
    return false;

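  // Start the timer that polls DoIOTask() for the lifetime of the codec.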
  io_timer_.Start(FROM_HERE,
                  DecodePollDelay(),
                  this,
                  &AndroidVideoDecodeAccelerator::DoIOTask);
  return true;
}

void AndroidVideoDecodeAccelerator::Reset() {
  DCHECK(thread_checker_.CalledOnValidThread());

  while (!pending_bitstream_buffers_.empty()) {
    int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
    pending_bitstream_buffers_.pop();

    if (bitstream_buffer_id != -1) {
      base::MessageLoop::current()->PostTask(
          FROM_HERE,
          base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
                     weak_this_factory_.GetWeakPtr(),
                     bitstream_buffer_id));
    }
  }
  bitstreams_notified_in_advance_.clear();

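  // Dismiss every outstanding picture buffer.  The client may still return
  // some of them later; ReusePictureBuffer() copes with that via
  // |dismissed_picture_ids_|.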
  for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
       it != output_picture_buffers_.end();
       ++it) {
    client_->DismissPictureBuffer(it->first);
    dismissed_picture_ids_.insert(it->first);
  }
  output_picture_buffers_.clear();
  std::queue<int32> empty;
  std::swap(free_picture_ids_, empty);
  CHECK(free_picture_ids_.empty());
  picturebuffers_requested_ = false;

  // On some devices, and up to at least JB-MR1,
  // - flush() can fail after EOS (b/8125974); and
  // - mid-stream resolution change is unsupported (b/7093648).
  // To cope with these facts, we always stop & restart the codec on Reset().
  media_codec_->Stop();
  ConfigureMediaCodec();

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));
}

void AndroidVideoDecodeAccelerator::Destroy() {
  DCHECK(thread_checker_.CalledOnValidThread());

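  // Invalidate weak pointers first so that no NotifyXXX task already posted
  // to the message loop can run against a destroyed decoder.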
  weak_this_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    io_timer_.Stop();
    media_codec_->Stop();
  }
  if (surface_texture_id_)
    glDeleteTextures(1, &surface_texture_id_);
  if (copier_)
    copier_->Destroy();
  delete this;
}

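// All interaction with MediaCodec happens on the single (Child) thread that
// |thread_checker_| guards, so decoding on the GPU IO thread is unsupported.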
bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}

void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}

void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}

void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}

void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}

void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}

}  // namespace content