// Merge Chromium + Blink git repositories
// [chromium-blink-merge.git] / content/common/gpu/media/android_video_decode_accelerator.cc
// blob f03328214275b8777cd134881ee0ecea2d3b3711
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
5 #include "content/common/gpu/media/android_video_decode_accelerator.h"
7 #include "base/bind.h"
8 #include "base/logging.h"
9 #include "base/message_loop/message_loop.h"
10 #include "base/metrics/histogram.h"
11 #include "base/trace_event/trace_event.h"
12 #include "content/common/gpu/gpu_channel.h"
13 #include "content/common/gpu/media/avda_return_on_failure.h"
14 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
15 #include "media/base/bitstream_buffer.h"
16 #include "media/base/limits.h"
17 #include "media/base/timestamp_constants.h"
18 #include "media/base/video_decoder_config.h"
19 #include "media/video/picture.h"
20 #include "ui/gl/android/scoped_java_surface.h"
21 #include "ui/gl/android/surface_texture.h"
22 #include "ui/gl/gl_bindings.h"
24 namespace content {
enum {
  // Upper bound on the number of bitstreams acknowledged to the client via
  // NotifyEndOfBitstreamBuffer() before any decoded output has been seen.
  // Used by QueueInput() to throttle the client's input.
  kMaxBitstreamsNotifiedInAdvance = 32
};
#if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
// MediaCodec is only guaranteed to support baseline, but some devices may
// support others. Advertise support for all H264 profiles and let the
// MediaCodec fail when decoding if it's not actually supported. It's assumed
// that consumers won't have software fallback for H264 on Android anyway.
static const media::VideoCodecProfile kSupportedH264Profiles[] = {
    media::H264PROFILE_BASELINE,
    media::H264PROFILE_MAIN,
    media::H264PROFILE_EXTENDED,
    media::H264PROFILE_HIGH,
    media::H264PROFILE_HIGH10PROFILE,
    media::H264PROFILE_HIGH422PROFILE,
    media::H264PROFILE_HIGH444PREDICTIVEPROFILE,
    media::H264PROFILE_SCALABLEBASELINE,
    media::H264PROFILE_SCALABLEHIGH,
    media::H264PROFILE_STEREOHIGH,
    media::H264PROFILE_MULTIVIEWHIGH};
#endif
50 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
51 // has no callback mechanism (b/11990118), we must drive it by polling for
52 // complete frames (and available input buffers, when the codec is fully
53 // saturated). This function defines the polling delay. The value used is an
54 // arbitrary choice that trades off CPU utilization (spinning) against latency.
55 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
56 static inline const base::TimeDelta DecodePollDelay() {
57 // An alternative to this polling scheme could be to dedicate a new thread
58 // (instead of using the ChildThread) to run the MediaCodec, and make that
59 // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
60 // believes the codec should complete "soon" (e.g. waiting for an input
61 // buffer, or waiting for a picture when it knows enough complete input
62 // pictures have been fed to saturate any internal buffering). This is
63 // speculative and it's unclear that this would be a win (nor that there's a
64 // reasonably device-agnostic way to fill in the "believes" above).
65 return base::TimeDelta::FromMilliseconds(10);
68 static inline const base::TimeDelta NoWaitTimeOut() {
69 return base::TimeDelta::FromMicroseconds(0);
72 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
73 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
74 const base::Callback<bool(void)>& make_context_current,
75 scoped_ptr<BackingStrategy> strategy)
76 : client_(NULL),
77 make_context_current_(make_context_current),
78 codec_(media::kCodecH264),
79 state_(NO_ERROR),
80 surface_texture_id_(0),
81 picturebuffers_requested_(false),
82 gl_decoder_(decoder),
83 strategy_(strategy.Pass()),
84 weak_this_factory_(this) {}
86 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
87 DCHECK(thread_checker_.CalledOnValidThread());
90 bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
91 Client* client) {
92 DCHECK(!media_codec_);
93 DCHECK(thread_checker_.CalledOnValidThread());
94 TRACE_EVENT0("media", "AVDA::Initialize");
96 client_ = client;
97 codec_ = VideoCodecProfileToVideoCodec(profile);
99 strategy_->SetStateProvider(this);
101 bool profile_supported = codec_ == media::kCodecVP8;
102 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
103 profile_supported |=
104 (codec_ == media::kCodecVP9 || codec_ == media::kCodecH264);
105 #endif
107 if (!profile_supported) {
108 LOG(ERROR) << "Unsupported profile: " << profile;
109 return false;
112 // Only use MediaCodec for VP8/9 if it's likely backed by hardware.
113 if ((codec_ == media::kCodecVP8 || codec_ == media::kCodecVP9) &&
114 media::VideoCodecBridge::IsKnownUnaccelerated(
115 codec_, media::MEDIA_CODEC_DECODER)) {
116 DVLOG(1) << "Initialization failed: "
117 << (codec_ == media::kCodecVP8 ? "vp8" : "vp9")
118 << " is not hardware accelerated";
119 return false;
122 if (!make_context_current_.Run()) {
123 LOG(ERROR) << "Failed to make this decoder's GL context current.";
124 return false;
127 if (!gl_decoder_) {
128 LOG(ERROR) << "Failed to get gles2 decoder instance.";
129 return false;
131 glGenTextures(1, &surface_texture_id_);
132 glActiveTexture(GL_TEXTURE0);
133 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);
135 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
136 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
137 glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
138 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
139 glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
140 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
141 gl_decoder_->RestoreTextureUnitBindings(0);
142 gl_decoder_->RestoreActiveTexture();
144 surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);
146 if (!ConfigureMediaCodec()) {
147 LOG(ERROR) << "Failed to create MediaCodec instance.";
148 return false;
151 return true;
154 void AndroidVideoDecodeAccelerator::DoIOTask() {
155 DCHECK(thread_checker_.CalledOnValidThread());
156 TRACE_EVENT0("media", "AVDA::DoIOTask");
157 if (state_ == ERROR) {
158 return;
161 QueueInput();
162 DequeueOutput();
165 void AndroidVideoDecodeAccelerator::QueueInput() {
166 DCHECK(thread_checker_.CalledOnValidThread());
167 TRACE_EVENT0("media", "AVDA::QueueInput");
168 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
169 return;
170 if (pending_bitstream_buffers_.empty())
171 return;
173 int input_buf_index = 0;
174 media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
175 NoWaitTimeOut(), &input_buf_index);
176 if (status != media::MEDIA_CODEC_OK) {
177 DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
178 status == media::MEDIA_CODEC_ERROR);
179 return;
182 base::Time queued_time = pending_bitstream_buffers_.front().second;
183 UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
184 base::Time::Now() - queued_time);
185 media::BitstreamBuffer bitstream_buffer =
186 pending_bitstream_buffers_.front().first;
187 pending_bitstream_buffers_.pop();
188 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount",
189 pending_bitstream_buffers_.size());
191 if (bitstream_buffer.id() == -1) {
192 media_codec_->QueueEOS(input_buf_index);
193 return;
196 scoped_ptr<base::SharedMemory> shm(
197 new base::SharedMemory(bitstream_buffer.handle(), true));
198 RETURN_ON_FAILURE(this, shm->Map(bitstream_buffer.size()),
199 "Failed to SharedMemory::Map()", UNREADABLE_INPUT);
201 const base::TimeDelta presentation_timestamp =
202 bitstream_buffer.presentation_timestamp();
203 DCHECK(presentation_timestamp != media::kNoTimestamp())
204 << "Bitstream buffers must have valid presentation timestamps";
205 // There may already be a bitstream buffer with this timestamp, e.g., VP9 alt
206 // ref frames, but it's OK to overwrite it because we only expect a single
207 // output frame to have that timestamp. AVDA clients only use the bitstream
208 // buffer id in the returned Pictures to map a bitstream buffer back to a
209 // timestamp on their side, so either one of the bitstream buffer ids will
210 // result in them finding the right timestamp.
211 bitstream_buffers_in_decoder_[presentation_timestamp] = bitstream_buffer.id();
213 status = media_codec_->QueueInputBuffer(
214 input_buf_index, static_cast<const uint8*>(shm->memory()),
215 bitstream_buffer.size(), presentation_timestamp);
216 RETURN_ON_FAILURE(this, status == media::MEDIA_CODEC_OK,
217 "Failed to QueueInputBuffer: " << status, PLATFORM_FAILURE);
219 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output
220 // will be returned from the bitstream buffer. However, MediaCodec API is
221 // not enough to guarantee it.
222 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to
223 // keep getting more bitstreams from the client, and throttle them by using
224 // |bitstreams_notified_in_advance_|.
225 // TODO(dwkang): check if there is a way to remove this workaround.
226 base::MessageLoop::current()->PostTask(
227 FROM_HERE,
228 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
229 weak_this_factory_.GetWeakPtr(),
230 bitstream_buffer.id()));
231 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
234 void AndroidVideoDecodeAccelerator::DequeueOutput() {
235 DCHECK(thread_checker_.CalledOnValidThread());
236 TRACE_EVENT0("media", "AVDA::DequeueOutput");
237 if (picturebuffers_requested_ && output_picture_buffers_.empty())
238 return;
240 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
241 // Don't have any picture buffer to send. Need to wait more.
242 return;
245 bool eos = false;
246 base::TimeDelta presentation_timestamp;
247 int32 buf_index = 0;
248 do {
249 size_t offset = 0;
250 size_t size = 0;
252 TRACE_EVENT_BEGIN0("media", "AVDA::DequeueOutputBuffer");
253 media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
254 NoWaitTimeOut(), &buf_index, &offset, &size, &presentation_timestamp,
255 &eos, NULL);
256 TRACE_EVENT_END2("media", "AVDA::DequeueOutputBuffer", "status", status,
257 "presentation_timestamp (ms)",
258 presentation_timestamp.InMilliseconds());
259 switch (status) {
260 case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
261 case media::MEDIA_CODEC_ERROR:
262 return;
264 case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
265 int32 width, height;
266 media_codec_->GetOutputFormat(&width, &height);
268 if (!picturebuffers_requested_) {
269 picturebuffers_requested_ = true;
270 size_ = gfx::Size(width, height);
271 base::MessageLoop::current()->PostTask(
272 FROM_HERE,
273 base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
274 weak_this_factory_.GetWeakPtr()));
275 } else {
276 // Dynamic resolution change support is not specified by the Android
277 // platform at and before JB-MR1, so it's not possible to smoothly
278 // continue playback at this point. Instead, error out immediately,
279 // expecting clients to Reset() as appropriate to avoid this.
280 // b/7093648
281 RETURN_ON_FAILURE(this, size_ == gfx::Size(width, height),
282 "Dynamic resolution change is not supported.",
283 PLATFORM_FAILURE);
285 return;
288 case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
289 break;
291 case media::MEDIA_CODEC_OK:
292 DCHECK_GE(buf_index, 0);
293 break;
295 default:
296 NOTREACHED();
297 break;
299 } while (buf_index < 0);
301 if (eos) {
302 media_codec_->ReleaseOutputBuffer(buf_index, false);
303 base::MessageLoop::current()->PostTask(
304 FROM_HERE,
305 base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
306 weak_this_factory_.GetWeakPtr()));
307 } else {
308 // Get the bitstream buffer id from the timestamp.
309 auto it = bitstream_buffers_in_decoder_.find(presentation_timestamp);
310 // Require the decoder to output at most one frame for each distinct input
311 // buffer timestamp. A VP9 alt ref frame is a case where an input buffer,
312 // with a possibly unique timestamp, will not result in a corresponding
313 // output frame.
314 CHECK(it != bitstream_buffers_in_decoder_.end())
315 << "Unexpected output frame timestamp";
316 const int32 bitstream_buffer_id = it->second;
317 bitstream_buffers_in_decoder_.erase(bitstream_buffers_in_decoder_.begin(),
318 ++it);
319 SendCurrentSurfaceToClient(buf_index, bitstream_buffer_id);
321 // Removes ids former or equal than the id from decoder. Note that
322 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
323 // because of frame reordering issue. We just maintain this roughly and use
324 // for the throttling purpose.
325 for (auto bitstream_it = bitstreams_notified_in_advance_.begin();
326 bitstream_it != bitstreams_notified_in_advance_.end();
327 ++bitstream_it) {
328 if (*bitstream_it == bitstream_buffer_id) {
329 bitstreams_notified_in_advance_.erase(
330 bitstreams_notified_in_advance_.begin(), ++bitstream_it);
331 break;
337 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
338 int32 codec_buffer_index,
339 int32 bitstream_id) {
340 DCHECK(thread_checker_.CalledOnValidThread());
341 DCHECK_NE(bitstream_id, -1);
342 DCHECK(!free_picture_ids_.empty());
343 TRACE_EVENT0("media", "AVDA::SendCurrentSurfaceToClient");
345 RETURN_ON_FAILURE(this, make_context_current_.Run(),
346 "Failed to make this decoder's GL context current.",
347 PLATFORM_FAILURE);
349 int32 picture_buffer_id = free_picture_ids_.front();
350 free_picture_ids_.pop();
351 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size());
353 OutputBufferMap::const_iterator i =
354 output_picture_buffers_.find(picture_buffer_id);
355 RETURN_ON_FAILURE(this, i != output_picture_buffers_.end(),
356 "Can't find a PictureBuffer for " << picture_buffer_id,
357 PLATFORM_FAILURE);
359 // Connect the PictureBuffer to the decoded frame, via whatever
360 // mechanism the strategy likes.
361 strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, i->second);
363 // TODO(henryhsu): Pass (0, 0) as visible size will cause several test
364 // cases failed. We should make sure |size_| is coded size or visible size.
365 base::MessageLoop::current()->PostTask(
366 FROM_HERE, base::Bind(&AndroidVideoDecodeAccelerator::NotifyPictureReady,
367 weak_this_factory_.GetWeakPtr(),
368 media::Picture(picture_buffer_id, bitstream_id,
369 gfx::Rect(size_), false)));
372 void AndroidVideoDecodeAccelerator::Decode(
373 const media::BitstreamBuffer& bitstream_buffer) {
374 DCHECK(thread_checker_.CalledOnValidThread());
375 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
376 base::MessageLoop::current()->PostTask(
377 FROM_HERE,
378 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
379 weak_this_factory_.GetWeakPtr(), bitstream_buffer.id()));
380 return;
383 pending_bitstream_buffers_.push(
384 std::make_pair(bitstream_buffer, base::Time::Now()));
385 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount",
386 pending_bitstream_buffers_.size());
388 DoIOTask();
391 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
392 client_->ProvidePictureBuffers(strategy_->GetNumPictureBuffers(), size_,
393 strategy_->GetTextureTarget());
396 void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
397 const std::vector<media::PictureBuffer>& buffers) {
398 DCHECK(thread_checker_.CalledOnValidThread());
399 DCHECK(output_picture_buffers_.empty());
400 DCHECK(free_picture_ids_.empty());
402 for (size_t i = 0; i < buffers.size(); ++i) {
403 RETURN_ON_FAILURE(this, buffers[i].size() == size_,
404 "Invalid picture buffer size was passed.",
405 INVALID_ARGUMENT);
406 int32 id = buffers[i].id();
407 output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
408 free_picture_ids_.push(id);
409 // Since the client might be re-using |picture_buffer_id| values, forget
410 // about previously-dismissed IDs now. See ReusePictureBuffer() comment
411 // about "zombies" for why we maintain this set in the first place.
412 dismissed_picture_ids_.erase(id);
414 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size());
416 RETURN_ON_FAILURE(
417 this, output_picture_buffers_.size() >= strategy_->GetNumPictureBuffers(),
418 "Invalid picture buffers were passed.", INVALID_ARGUMENT);
420 DoIOTask();
423 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
424 int32 picture_buffer_id) {
425 DCHECK(thread_checker_.CalledOnValidThread());
427 // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
428 // IPC, or in a PostTask either at the sender or receiver) when we sent a
429 // DismissPictureBuffer() for this |picture_buffer_id|. Account for such
430 // potential "zombie" IDs here.
431 if (dismissed_picture_ids_.erase(picture_buffer_id))
432 return;
434 free_picture_ids_.push(picture_buffer_id);
435 TRACE_COUNTER1("media", "AVDA::FreePictureIds", free_picture_ids_.size());
437 DoIOTask();
440 void AndroidVideoDecodeAccelerator::Flush() {
441 DCHECK(thread_checker_.CalledOnValidThread());
443 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
446 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
447 DCHECK(thread_checker_.CalledOnValidThread());
448 DCHECK(surface_texture_.get());
449 TRACE_EVENT0("media", "AVDA::ConfigureMediaCodec");
451 gfx::ScopedJavaSurface surface(surface_texture_.get());
453 // Pass a dummy 320x240 canvas size and let the codec signal the real size
454 // when it's known from the bitstream.
455 media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
456 codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
457 if (!media_codec_)
458 return false;
460 io_timer_.Start(FROM_HERE,
461 DecodePollDelay(),
462 this,
463 &AndroidVideoDecodeAccelerator::DoIOTask);
464 return true;
467 void AndroidVideoDecodeAccelerator::Reset() {
468 DCHECK(thread_checker_.CalledOnValidThread());
469 TRACE_EVENT0("media", "AVDA::Reset");
471 while (!pending_bitstream_buffers_.empty()) {
472 int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
473 pending_bitstream_buffers_.pop();
475 if (bitstream_buffer_id != -1) {
476 base::MessageLoop::current()->PostTask(
477 FROM_HERE,
478 base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
479 weak_this_factory_.GetWeakPtr(), bitstream_buffer_id));
482 TRACE_COUNTER1("media", "AVDA::PendingBitstreamBufferCount", 0);
483 bitstreams_notified_in_advance_.clear();
485 for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
486 it != output_picture_buffers_.end();
487 ++it) {
488 client_->DismissPictureBuffer(it->first);
489 dismissed_picture_ids_.insert(it->first);
491 output_picture_buffers_.clear();
492 std::queue<int32> empty;
493 std::swap(free_picture_ids_, empty);
494 CHECK(free_picture_ids_.empty());
495 picturebuffers_requested_ = false;
496 bitstream_buffers_in_decoder_.clear();
498 // On some devices, and up to at least JB-MR1,
499 // - flush() can fail after EOS (b/8125974); and
500 // - mid-stream resolution change is unsupported (b/7093648).
501 // To cope with these facts, we always stop & restart the codec on Reset().
502 io_timer_.Stop();
503 media_codec_->Stop();
504 ConfigureMediaCodec();
505 state_ = NO_ERROR;
507 base::MessageLoop::current()->PostTask(
508 FROM_HERE,
509 base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
510 weak_this_factory_.GetWeakPtr()));
513 void AndroidVideoDecodeAccelerator::Destroy() {
514 DCHECK(thread_checker_.CalledOnValidThread());
516 strategy_->Cleanup();
518 weak_this_factory_.InvalidateWeakPtrs();
519 if (media_codec_) {
520 io_timer_.Stop();
521 media_codec_->Stop();
523 if (surface_texture_id_)
524 glDeleteTextures(1, &surface_texture_id_);
525 delete this;
528 bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
529 return false;
532 const gfx::Size& AndroidVideoDecodeAccelerator::GetSize() const {
533 return size_;
536 const base::ThreadChecker& AndroidVideoDecodeAccelerator::ThreadChecker()
537 const {
538 return thread_checker_;
541 gfx::SurfaceTexture* AndroidVideoDecodeAccelerator::GetSurfaceTexture() const {
542 return surface_texture_.get();
545 uint32 AndroidVideoDecodeAccelerator::GetSurfaceTextureId() const {
546 return surface_texture_id_;
549 gpu::gles2::GLES2Decoder* AndroidVideoDecodeAccelerator::GetGlDecoder() const {
550 return gl_decoder_.get();
553 media::VideoCodecBridge* AndroidVideoDecodeAccelerator::GetMediaCodec() {
554 return media_codec_.get();
557 void AndroidVideoDecodeAccelerator::PostError(
558 const ::tracked_objects::Location& from_here,
559 media::VideoDecodeAccelerator::Error error) {
560 base::MessageLoop::current()->PostTask(
561 from_here, base::Bind(&AndroidVideoDecodeAccelerator::NotifyError,
562 weak_this_factory_.GetWeakPtr(), error));
563 state_ = ERROR;
566 void AndroidVideoDecodeAccelerator::NotifyPictureReady(
567 const media::Picture& picture) {
568 client_->PictureReady(picture);
571 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
572 int input_buffer_id) {
573 client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
576 void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
577 client_->NotifyFlushDone();
580 void AndroidVideoDecodeAccelerator::NotifyResetDone() {
581 client_->NotifyResetDone();
584 void AndroidVideoDecodeAccelerator::NotifyError(
585 media::VideoDecodeAccelerator::Error error) {
586 client_->NotifyError(error);
589 // static
590 media::VideoDecodeAccelerator::SupportedProfiles
591 AndroidVideoDecodeAccelerator::GetSupportedProfiles() {
592 SupportedProfiles profiles;
594 if (!media::VideoCodecBridge::IsKnownUnaccelerated(
595 media::kCodecVP8, media::MEDIA_CODEC_DECODER)) {
596 SupportedProfile profile;
597 profile.profile = media::VP8PROFILE_ANY;
598 profile.min_resolution.SetSize(0, 0);
599 profile.max_resolution.SetSize(1920, 1088);
600 profiles.push_back(profile);
603 #if defined(ENABLE_MEDIA_PIPELINE_ON_ANDROID)
604 if (!media::VideoCodecBridge::IsKnownUnaccelerated(
605 media::kCodecVP9, media::MEDIA_CODEC_DECODER)) {
606 SupportedProfile profile;
607 profile.profile = media::VP9PROFILE_ANY;
608 profile.min_resolution.SetSize(0, 0);
609 profile.max_resolution.SetSize(1920, 1088);
610 profiles.push_back(profile);
613 for (const auto& supported_profile : kSupportedH264Profiles) {
614 SupportedProfile profile;
615 profile.profile = supported_profile;
616 profile.min_resolution.SetSize(0, 0);
617 // Advertise support for 4k and let the MediaCodec fail when decoding if it
618 // doesn't support the resolution. It's assumed that consumers won't have
619 // software fallback for H264 on Android anyway.
620 profile.max_resolution.SetSize(3840, 2160);
621 profiles.push_back(profile);
623 #endif
625 return profiles;
628 } // namespace content