Roll src/third_party/WebKit a452221:9ff6d11 (svn 202117:202119)
chromium-blink-merge.git: content/renderer/media/android/webmediaplayer_android.cc
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/android/webmediaplayer_android.h"
7 #include <limits>
9 #include "base/android/build_info.h"
10 #include "base/bind.h"
11 #include "base/callback_helpers.h"
12 #include "base/command_line.h"
13 #include "base/files/file_path.h"
14 #include "base/logging.h"
15 #include "base/metrics/histogram.h"
16 #include "base/single_thread_task_runner.h"
17 #include "base/strings/string_number_conversions.h"
18 #include "base/strings/utf_string_conversions.h"
19 #include "cc/blink/web_layer_impl.h"
20 #include "cc/layers/video_layer.h"
21 #include "content/public/common/content_client.h"
22 #include "content/public/common/content_switches.h"
23 #include "content/public/common/renderer_preferences.h"
24 #include "content/public/renderer/render_frame.h"
25 #include "content/renderer/media/android/renderer_demuxer_android.h"
26 #include "content/renderer/media/android/renderer_media_player_manager.h"
27 #include "content/renderer/media/crypto/render_cdm_factory.h"
28 #include "content/renderer/media/crypto/renderer_cdm_manager.h"
29 #include "content/renderer/render_frame_impl.h"
30 #include "content/renderer/render_thread_impl.h"
31 #include "content/renderer/render_view_impl.h"
32 #include "gpu/GLES2/gl2extchromium.h"
33 #include "gpu/command_buffer/client/gles2_interface.h"
34 #include "gpu/command_buffer/common/mailbox_holder.h"
35 #include "media/base/android/media_common_android.h"
36 #include "media/base/android/media_player_android.h"
37 #include "media/base/bind_to_current_loop.h"
38 #include "media/base/cdm_context.h"
39 #include "media/base/key_systems.h"
40 #include "media/base/media_keys.h"
41 #include "media/base/media_log.h"
42 #include "media/base/media_switches.h"
43 #include "media/base/timestamp_constants.h"
44 #include "media/base/video_frame.h"
45 #include "media/blink/webcontentdecryptionmodule_impl.h"
46 #include "media/blink/webmediaplayer_delegate.h"
47 #include "net/base/mime_util.h"
48 #include "third_party/WebKit/public/platform/Platform.h"
49 #include "third_party/WebKit/public/platform/WebContentDecryptionModuleResult.h"
50 #include "third_party/WebKit/public/platform/WebEncryptedMediaTypes.h"
51 #include "third_party/WebKit/public/platform/WebGraphicsContext3DProvider.h"
52 #include "third_party/WebKit/public/platform/WebMediaPlayerClient.h"
53 #include "third_party/WebKit/public/platform/WebMediaPlayerEncryptedMediaClient.h"
54 #include "third_party/WebKit/public/platform/WebString.h"
55 #include "third_party/WebKit/public/platform/WebURL.h"
56 #include "third_party/WebKit/public/web/WebDocument.h"
57 #include "third_party/WebKit/public/web/WebFrame.h"
58 #include "third_party/WebKit/public/web/WebRuntimeFeatures.h"
59 #include "third_party/WebKit/public/web/WebSecurityOrigin.h"
60 #include "third_party/WebKit/public/web/WebView.h"
61 #include "third_party/skia/include/core/SkCanvas.h"
62 #include "third_party/skia/include/core/SkPaint.h"
63 #include "third_party/skia/include/core/SkTypeface.h"
64 #include "third_party/skia/include/gpu/GrContext.h"
65 #include "third_party/skia/include/gpu/SkGrPixelRef.h"
66 #include "ui/gfx/image/image.h"
68 static const uint32 kGLTextureExternalOES = 0x8D65;
69 static const int kSDKVersionToSupportSecurityOriginCheck = 20;
71 using blink::WebMediaPlayer;
72 using blink::WebSize;
73 using blink::WebString;
74 using blink::WebURL;
75 using gpu::gles2::GLES2Interface;
76 using media::LogHelper;
77 using media::MediaLog;
78 using media::MediaPlayerAndroid;
79 using media::VideoFrame;
81 namespace {
82 // Prefix for histograms related to Encrypted Media Extensions.
83 const char* kMediaEme = "Media.EME.";
85 // This file-static function allows the texture release callback to run even after the WMPA is deleted.
86 void OnReleaseTexture(
87 const scoped_refptr<content::StreamTextureFactory>& factories,
88 uint32 texture_id,
89 uint32 release_sync_point) {
90 GLES2Interface* gl = factories->ContextGL();
91 gl->WaitSyncPointCHROMIUM(release_sync_point);
92 gl->DeleteTextures(1, &texture_id);
93 // Flush to ensure that the stream texture gets deleted in a timely fashion.
94 gl->ShallowFlushCHROMIUM();
97 bool IsSkBitmapProperlySizedTexture(const SkBitmap* bitmap,
98 const gfx::Size& size) {
99 return bitmap->getTexture() && bitmap->width() == size.width() &&
100 bitmap->height() == size.height();
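// Allocates a GPU-backed SkBitmap of |size| (an RGBA render-target scratch
// texture) that the current video frame can be copied into before painting.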
103 bool AllocateSkBitmapTexture(GrContext* gr,
104 SkBitmap* bitmap,
105 const gfx::Size& size) {
106 DCHECK(gr);
107 GrTextureDesc desc;
108 // Use kRGBA_8888_GrPixelConfig, not kSkia8888_GrPixelConfig, to avoid
109 // RGBA to BGRA conversion.
110 desc.fConfig = kRGBA_8888_GrPixelConfig;
111 // kRenderTarget_GrTextureFlagBit avoids a copy before readback in skia.
112 desc.fFlags = kRenderTarget_GrSurfaceFlag;
113 desc.fSampleCnt = 0;
114 desc.fOrigin = kTopLeft_GrSurfaceOrigin;
115 desc.fWidth = size.width();
116 desc.fHeight = size.height();
117 skia::RefPtr<GrTexture> texture = skia::AdoptRef(
118 gr->textureProvider()->refScratchTexture(
119 desc, GrTextureProvider::kExact_ScratchTexMatch));
120 if (!texture.get())
121 return false;
123 SkImageInfo info = SkImageInfo::MakeN32Premul(desc.fWidth, desc.fHeight);
124 SkGrPixelRef* pixel_ref = SkNEW_ARGS(SkGrPixelRef, (info, texture.get()));
125 if (!pixel_ref)
126 return false;
127 bitmap->setInfo(info);
128 bitmap->setPixelRef(pixel_ref)->unref();
129 return true;
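// Adapts a blink::WebGraphicsContext3D to media::VideoFrame::SyncPointClient
// so a frame's release sync point can be inserted and waited on.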
132 class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
133 public:
134 explicit SyncPointClientImpl(
135 blink::WebGraphicsContext3D* web_graphics_context)
136 : web_graphics_context_(web_graphics_context) {}
137 ~SyncPointClientImpl() override {}
138 uint32 InsertSyncPoint() override {
139 return web_graphics_context_->insertSyncPoint();
141 void WaitSyncPoint(uint32 sync_point) override {
142 web_graphics_context_->waitSyncPoint(sync_point);
145 private:
146 blink::WebGraphicsContext3D* web_graphics_context_;
149 } // namespace
151 namespace content {
153 WebMediaPlayerAndroid::WebMediaPlayerAndroid(
154 blink::WebFrame* frame,
155 blink::WebMediaPlayerClient* client,
156 blink::WebMediaPlayerEncryptedMediaClient* encrypted_client,
157 base::WeakPtr<media::WebMediaPlayerDelegate> delegate,
158 RendererMediaPlayerManager* player_manager,
159 media::CdmFactory* cdm_factory,
160 scoped_refptr<StreamTextureFactory> factory,
161 const media::WebMediaPlayerParams& params)
162 : RenderFrameObserver(RenderFrame::FromWebFrame(frame)),
163 frame_(frame),
164 client_(client),
165 encrypted_client_(encrypted_client),
166 delegate_(delegate),
167 defer_load_cb_(params.defer_load_cb()),
168 buffered_(static_cast<size_t>(1)),
169 media_task_runner_(params.media_task_runner()),
170 ignore_metadata_duration_change_(false),
171 pending_seek_(false),
172 seeking_(false),
173 did_loading_progress_(false),
174 player_manager_(player_manager),
175 cdm_factory_(cdm_factory),
176 media_permission_(params.media_permission()),
177 network_state_(WebMediaPlayer::NetworkStateEmpty),
178 ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
179 texture_id_(0),
180 stream_id_(0),
181 is_player_initialized_(false),
182 is_playing_(false),
183 needs_establish_peer_(true),
184 has_size_info_(false),
185 // Threaded compositing isn't enabled universally yet.
186 compositor_task_runner_(
187 params.compositor_task_runner()
188 ? params.compositor_task_runner()
189 : base::ThreadTaskRunnerHandle::Get()),
190 stream_texture_factory_(factory),
191 needs_external_surface_(false),
192 is_fullscreen_(false),
193 video_frame_provider_client_(NULL),
194 player_type_(MEDIA_PLAYER_TYPE_URL),
195 is_remote_(false),
196 media_log_(params.media_log()),
197 init_data_type_(media::EmeInitDataType::UNKNOWN),
198 cdm_context_(NULL),
199 allow_stored_credentials_(false),
200 is_local_resource_(false),
201 interpolator_(&default_tick_clock_),
202 weak_factory_(this) {
203 DCHECK(player_manager_);
204 DCHECK(cdm_factory_);
206 DCHECK(main_thread_checker_.CalledOnValidThread());
207 stream_texture_factory_->AddObserver(this);
209 player_id_ = player_manager_->RegisterMediaPlayer(this);
211 #if defined(VIDEO_HOLE)
212 const RendererPreferences& prefs =
213 static_cast<RenderFrameImpl*>(render_frame())
214 ->render_view()
215 ->renderer_preferences();
216 force_use_overlay_embedded_video_ = prefs.use_view_overlay_for_all_video;
217 if (force_use_overlay_embedded_video_ ||
218 player_manager_->ShouldUseVideoOverlayForEmbeddedEncryptedVideo()) {
219 // Defer stream texture creation until we are sure it's necessary.
220 needs_establish_peer_ = false;
221 current_frame_ = VideoFrame::CreateBlackFrame(gfx::Size(1, 1));
223 #endif // defined(VIDEO_HOLE)
224 TryCreateStreamTextureProxyIfNeeded();
225 interpolator_.SetUpperBound(base::TimeDelta());
227 if (params.initial_cdm()) {
228 cdm_context_ = media::ToWebContentDecryptionModuleImpl(params.initial_cdm())
229 ->GetCdmContext();
233 WebMediaPlayerAndroid::~WebMediaPlayerAndroid() {
234 DCHECK(main_thread_checker_.CalledOnValidThread());
235 SetVideoFrameProviderClient(NULL);
236 client_->setWebLayer(NULL);
238 if (is_player_initialized_)
239 player_manager_->DestroyPlayer(player_id_);
241 player_manager_->UnregisterMediaPlayer(player_id_);
243 if (stream_id_) {
244 GLES2Interface* gl = stream_texture_factory_->ContextGL();
245 gl->DeleteTextures(1, &texture_id_);
246 // Flush to ensure that the stream texture gets deleted in a timely fashion.
247 gl->ShallowFlushCHROMIUM();
248 texture_id_ = 0;
249 texture_mailbox_ = gpu::Mailbox();
250 stream_id_ = 0;
254 base::AutoLock auto_lock(current_frame_lock_);
255 current_frame_ = NULL;
258 if (delegate_)
259 delegate_->PlayerGone(this);
261 stream_texture_factory_->RemoveObserver(this);
263 if (media_source_delegate_) {
264 // Part of |media_source_delegate_| needs to be stopped on the media thread.
265 // Wait until |media_source_delegate_| is fully stopped before tearing
266 // down other objects.
267 base::WaitableEvent waiter(false, false);
268 media_source_delegate_->Stop(
269 base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
270 waiter.Wait();
274 void WebMediaPlayerAndroid::load(LoadType load_type,
275 const blink::WebURL& url,
276 CORSMode cors_mode) {
277 if (!defer_load_cb_.is_null()) {
278 defer_load_cb_.Run(base::Bind(&WebMediaPlayerAndroid::DoLoad,
279 weak_factory_.GetWeakPtr(), load_type, url,
280 cors_mode));
281 return;
283 DoLoad(load_type, url, cors_mode);
286 void WebMediaPlayerAndroid::DoLoad(LoadType load_type,
287 const blink::WebURL& url,
288 CORSMode cors_mode) {
289 DCHECK(main_thread_checker_.CalledOnValidThread());
291 media::ReportMetrics(load_type, GURL(url),
292 GURL(frame_->document().securityOrigin().toString()));
294 switch (load_type) {
295 case LoadTypeURL:
296 player_type_ = MEDIA_PLAYER_TYPE_URL;
297 break;
299 case LoadTypeMediaSource:
300 player_type_ = MEDIA_PLAYER_TYPE_MEDIA_SOURCE;
301 break;
303 case LoadTypeMediaStream:
304 CHECK(false) << "WebMediaPlayerAndroid doesn't support MediaStream on "
305 "this platform";
306 return;
309 url_ = url;
310 is_local_resource_ = IsLocalResource();
311 int demuxer_client_id = 0;
312 if (player_type_ != MEDIA_PLAYER_TYPE_URL) {
313 RendererDemuxerAndroid* demuxer =
314 RenderThreadImpl::current()->renderer_demuxer();
315 demuxer_client_id = demuxer->GetNextDemuxerClientID();
317 media_source_delegate_.reset(new MediaSourceDelegate(
318 demuxer, demuxer_client_id, media_task_runner_, media_log_));
320 if (player_type_ == MEDIA_PLAYER_TYPE_MEDIA_SOURCE) {
321 media_source_delegate_->InitializeMediaSource(
322 base::Bind(&WebMediaPlayerAndroid::OnMediaSourceOpened,
323 weak_factory_.GetWeakPtr()),
324 base::Bind(&WebMediaPlayerAndroid::OnEncryptedMediaInitData,
325 weak_factory_.GetWeakPtr()),
326 base::Bind(&WebMediaPlayerAndroid::SetDecryptorReadyCB,
327 weak_factory_.GetWeakPtr()),
328 base::Bind(&WebMediaPlayerAndroid::UpdateNetworkState,
329 weak_factory_.GetWeakPtr()),
330 base::Bind(&WebMediaPlayerAndroid::OnDurationChanged,
331 weak_factory_.GetWeakPtr()),
332 base::Bind(&WebMediaPlayerAndroid::OnWaitingForDecryptionKey,
333 weak_factory_.GetWeakPtr()));
334 InitializePlayer(url_, frame_->document().firstPartyForCookies(),
335 true, demuxer_client_id);
337 } else {
338 info_loader_.reset(
339 new MediaInfoLoader(
340 url,
341 cors_mode,
342 base::Bind(&WebMediaPlayerAndroid::DidLoadMediaInfo,
343 weak_factory_.GetWeakPtr())));
344 info_loader_->Start(frame_);
347 UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
348 UpdateReadyState(WebMediaPlayer::ReadyStateHaveNothing);
351 void WebMediaPlayerAndroid::DidLoadMediaInfo(
352 MediaInfoLoader::Status status,
353 const GURL& redirected_url,
354 const GURL& first_party_for_cookies,
355 bool allow_stored_credentials) {
356 DCHECK(main_thread_checker_.CalledOnValidThread());
357 DCHECK(!media_source_delegate_);
358 if (status == MediaInfoLoader::kFailed) {
359 info_loader_.reset();
360 UpdateNetworkState(WebMediaPlayer::NetworkStateNetworkError);
361 return;
363 redirected_url_ = redirected_url;
364 InitializePlayer(
365 redirected_url, first_party_for_cookies, allow_stored_credentials, 0);
367 UpdateNetworkState(WebMediaPlayer::NetworkStateIdle);
370 bool WebMediaPlayerAndroid::IsLocalResource() {
371 if (url_.SchemeIsFile() || url_.SchemeIsBlob())
372 return true;
374 std::string host = url_.host();
375 if (!host.compare("localhost") || !host.compare("127.0.0.1") ||
376 !host.compare("[::1]")) {
377 return true;
380 return false;
383 void WebMediaPlayerAndroid::play() {
384 DCHECK(main_thread_checker_.CalledOnValidThread());
386 // For HLS streams, some devices cannot detect the video size unless a surface
387 // texture is bound to it. See http://crbug.com/400145.
388 #if defined(VIDEO_HOLE)
389 if ((hasVideo() || IsHLSStream()) && needs_external_surface_ &&
390 !is_fullscreen_) {
391 DCHECK(!needs_establish_peer_);
392 player_manager_->RequestExternalSurface(player_id_, last_computed_rect_);
394 #endif // defined(VIDEO_HOLE)
396 TryCreateStreamTextureProxyIfNeeded();
397 // There is no need to establish the surface texture peer for fullscreen
398 // video.
399 if ((hasVideo() || IsHLSStream()) && needs_establish_peer_ &&
400 !is_fullscreen_) {
401 EstablishSurfaceTexturePeer();
404 if (paused())
405 player_manager_->Start(player_id_);
406 UpdatePlayingState(true);
407 UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
410 void WebMediaPlayerAndroid::pause() {
411 DCHECK(main_thread_checker_.CalledOnValidThread());
412 Pause(true);
415 void WebMediaPlayerAndroid::requestRemotePlayback() {
416 player_manager_->RequestRemotePlayback(player_id_);
419 void WebMediaPlayerAndroid::requestRemotePlaybackControl() {
420 player_manager_->RequestRemotePlaybackControl(player_id_);
423 void WebMediaPlayerAndroid::seek(double seconds) {
424 DCHECK(main_thread_checker_.CalledOnValidThread());
425 DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
427 base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds);
429 if (seeking_) {
430 if (new_seek_time == seek_time_) {
431 if (media_source_delegate_) {
432 // Don't suppress any redundant in-progress MSE seek. There could have
433 // been changes to the underlying buffers after seeking the demuxer and
434 // before receiving OnSeekComplete() for the currently in-progress seek.
435 MEDIA_LOG(DEBUG, media_log_)
436 << "Detected MediaSource seek to same time as in-progress seek to "
437 << seek_time_ << ".";
438 } else {
439 // Suppress all redundant seeks when not restricted by the media
440 // source demuxer API.
441 pending_seek_ = false;
442 return;
446 pending_seek_ = true;
447 pending_seek_time_ = new_seek_time;
449 if (media_source_delegate_)
450 media_source_delegate_->CancelPendingSeek(pending_seek_time_);
452 // Later, OnSeekComplete will trigger the pending seek.
453 return;
456 seeking_ = true;
457 seek_time_ = new_seek_time;
459 if (media_source_delegate_)
460 media_source_delegate_->StartWaitingForSeek(seek_time_);
462 // Kick off the asynchronous seek!
463 player_manager_->Seek(player_id_, seek_time_);
466 bool WebMediaPlayerAndroid::supportsSave() const {
467 return false;
470 void WebMediaPlayerAndroid::setRate(double rate) {
471 NOTIMPLEMENTED();
474 void WebMediaPlayerAndroid::setVolume(double volume) {
475 DCHECK(main_thread_checker_.CalledOnValidThread());
476 player_manager_->SetVolume(player_id_, volume);
479 void WebMediaPlayerAndroid::setSinkId(
480 const blink::WebString& device_id,
481 media::WebSetSinkIdCB* raw_web_callbacks) {
482 DCHECK(main_thread_checker_.CalledOnValidThread());
483 scoped_ptr<media::WebSetSinkIdCB> web_callbacks(raw_web_callbacks);
484 web_callbacks->onError(new blink::WebSetSinkIdError(
485 blink::WebSetSinkIdError::ErrorTypeNotSupported, "Not Supported"));
488 bool WebMediaPlayerAndroid::hasVideo() const {
489 DCHECK(main_thread_checker_.CalledOnValidThread());
490 // If we have obtained video size information before, use it.
491 if (has_size_info_)
492 return !natural_size_.isEmpty();
494 // TODO(qinmin): need a better method to determine whether the current media
495 // content contains video. Android does not provide any function to do
496 // this.
497 // We don't know whether the current media content has video unless
498 // the player is prepared. If the player is not prepared, we fall back
499 // to the mime-type. There may be no mime-type on a redirect URL.
500 // In that case, we conservatively assume it contains video so that
501 // the enterFullscreen() call will not fail.
502 if (!url_.has_path())
503 return false;
504 std::string mime;
505 if (!net::GetMimeTypeFromFile(base::FilePath(url_.path()), &mime))
506 return true;
507 return mime.find("audio/") == std::string::npos;
510 bool WebMediaPlayerAndroid::hasAudio() const {
511 DCHECK(main_thread_checker_.CalledOnValidThread());
512 if (!url_.has_path())
513 return false;
514 std::string mime;
515 if (!net::GetMimeTypeFromFile(base::FilePath(url_.path()), &mime))
516 return true;
518 if (mime.find("audio/") != std::string::npos ||
519 mime.find("video/") != std::string::npos ||
520 mime.find("application/ogg") != std::string::npos ||
521 mime.find("application/x-mpegurl") != std::string::npos) {
522 return true;
524 return false;
527 bool WebMediaPlayerAndroid::isRemote() const {
528 return is_remote_;
531 bool WebMediaPlayerAndroid::paused() const {
532 return !is_playing_;
535 bool WebMediaPlayerAndroid::seeking() const {
536 return seeking_;
539 double WebMediaPlayerAndroid::duration() const {
540 DCHECK(main_thread_checker_.CalledOnValidThread());
541 // HTML5 spec requires duration to be NaN if readyState is HAVE_NOTHING
542 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
543 return std::numeric_limits<double>::quiet_NaN();
545 if (duration_ == media::kInfiniteDuration())
546 return std::numeric_limits<double>::infinity();
548 return duration_.InSecondsF();
551 double WebMediaPlayerAndroid::timelineOffset() const {
552 DCHECK(main_thread_checker_.CalledOnValidThread());
553 base::Time timeline_offset;
554 if (media_source_delegate_)
555 timeline_offset = media_source_delegate_->GetTimelineOffset();
557 if (timeline_offset.is_null())
558 return std::numeric_limits<double>::quiet_NaN();
560 return timeline_offset.ToJsTime();
563 double WebMediaPlayerAndroid::currentTime() const {
564 DCHECK(main_thread_checker_.CalledOnValidThread());
565 // If the player is processing a seek, return the seek time.
566 // Blink may still query us if updatePlaybackState() occurs while seeking.
567 if (seeking()) {
568 return pending_seek_ ?
569 pending_seek_time_.InSecondsF() : seek_time_.InSecondsF();
572 return std::min(
573 (const_cast<media::TimeDeltaInterpolator*>(
574 &interpolator_))->GetInterpolatedTime(), duration_).InSecondsF();
577 WebSize WebMediaPlayerAndroid::naturalSize() const {
578 return natural_size_;
581 WebMediaPlayer::NetworkState WebMediaPlayerAndroid::networkState() const {
582 return network_state_;
585 WebMediaPlayer::ReadyState WebMediaPlayerAndroid::readyState() const {
586 return ready_state_;
589 blink::WebTimeRanges WebMediaPlayerAndroid::buffered() const {
590 if (media_source_delegate_)
591 return media_source_delegate_->Buffered();
592 return buffered_;
595 blink::WebTimeRanges WebMediaPlayerAndroid::seekable() const {
596 if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
597 return blink::WebTimeRanges();
599 // TODO(dalecurtis): Technically this allows seeking on media which return an
600 // infinite duration. While not expected, disabling this breaks semi-live
601 // players, http://crbug.com/427412.
602 const blink::WebTimeRange seekable_range(0.0, duration());
603 return blink::WebTimeRanges(&seekable_range, 1);
606 bool WebMediaPlayerAndroid::didLoadingProgress() {
607 bool ret = did_loading_progress_;
608 did_loading_progress_ = false;
609 return ret;
612 void WebMediaPlayerAndroid::paint(blink::WebCanvas* canvas,
613 const blink::WebRect& rect,
614 unsigned char alpha,
615 SkXfermode::Mode mode) {
616 DCHECK(main_thread_checker_.CalledOnValidThread());
617 scoped_ptr<blink::WebGraphicsContext3DProvider> provider =
618 scoped_ptr<blink::WebGraphicsContext3DProvider>(blink::Platform::current(
619 )->createSharedOffscreenGraphicsContext3DProvider());
620 if (!provider)
621 return;
622 blink::WebGraphicsContext3D* context3D = provider->context3d();
623 if (!context3D)
624 return;
626 // Copy the video texture into an RGBA texture-backed bitmap first, since the
627 // video texture on Android is GL_TEXTURE_EXTERNAL_OES, which Skia does not
628 // support yet. The bitmap's size needs to match the video, so use
629 // naturalSize() here. Check whether the existing texture-backed bitmap can
630 // be reused; otherwise, release it and allocate a new one based on the
631 // video size.
632 if (!IsSkBitmapProperlySizedTexture(&bitmap_, naturalSize())) {
633 if (!AllocateSkBitmapTexture(provider->grContext(), &bitmap_,
634 naturalSize())) {
635 return;
639 unsigned textureId = static_cast<unsigned>(
640 (bitmap_.getTexture())->getTextureHandle());
641 if (!copyVideoTextureToPlatformTexture(context3D, textureId,
642 GL_RGBA, GL_UNSIGNED_BYTE, true, false)) {
643 return;
646 // Draw the texture based bitmap onto the Canvas. If the canvas is
647 // hardware based, this will do a GPU-GPU texture copy.
648 // If the canvas is software based, the texture based bitmap will be
649 // read back to system memory and then drawn onto the canvas.
650 SkRect dest;
651 dest.set(rect.x, rect.y, rect.x + rect.width, rect.y + rect.height);
652 SkPaint paint;
653 paint.setAlpha(alpha);
654 paint.setXfermodeMode(mode);
655 // It is not necessary to pass the dest into the drawBitmap call since all
656 // the context has been set up before calling paintCurrentFrameInContext.
657 canvas->drawBitmapRect(bitmap_, dest, &paint);
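// Copies the current frame's mailbox texture into the caller-provided
// |texture| via copyTextureCHROMIUM and updates the frame's release sync
// point. Returns false for encrypted (external-surface) playback or when no
// texture-backed frame is available.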
660 bool WebMediaPlayerAndroid::copyVideoTextureToPlatformTexture(
661 blink::WebGraphicsContext3D* web_graphics_context,
662 unsigned int texture,
663 unsigned int internal_format,
664 unsigned int type,
665 bool premultiply_alpha,
666 bool flip_y) {
667 DCHECK(main_thread_checker_.CalledOnValidThread());
668 // Don't allow clients to copy an encrypted video frame.
669 if (needs_external_surface_)
670 return false;
672 scoped_refptr<VideoFrame> video_frame;
674 base::AutoLock auto_lock(current_frame_lock_);
675 video_frame = current_frame_;
678 if (!video_frame.get() || !video_frame->HasTextures())
679 return false;
680 DCHECK_EQ(1u, media::VideoFrame::NumPlanes(video_frame->format()));
681 const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
682 DCHECK((!is_remote_ &&
683 mailbox_holder.texture_target == GL_TEXTURE_EXTERNAL_OES) ||
684 (is_remote_ && mailbox_holder.texture_target == GL_TEXTURE_2D));
686 web_graphics_context->waitSyncPoint(mailbox_holder.sync_point);
688 // Ensure the texture's target is set before copyTextureCHROMIUM; otherwise
689 // an invalid texture target may be used for the copy.
690 uint32 src_texture =
691 web_graphics_context->createAndConsumeTextureCHROMIUM(
692 mailbox_holder.texture_target, mailbox_holder.mailbox.name);
694 // The application itself needs to set the right flip_y value to get the
695 // expected result.
696 // flip_y==true means to reverse the video orientation while
697 // flip_y==false means to keep the intrinsic orientation.
698 web_graphics_context->copyTextureCHROMIUM(
699 GL_TEXTURE_2D, src_texture, texture, internal_format, type,
700 flip_y, premultiply_alpha, false);
702 web_graphics_context->deleteTexture(src_texture);
703 web_graphics_context->flush();
705 SyncPointClientImpl client(web_graphics_context);
706 video_frame->UpdateReleaseSyncPoint(&client);
707 return true;
710 bool WebMediaPlayerAndroid::hasSingleSecurityOrigin() const {
711 DCHECK(main_thread_checker_.CalledOnValidThread());
712 if (player_type_ != MEDIA_PLAYER_TYPE_URL)
713 return true;
715 if (!info_loader_ || !info_loader_->HasSingleOrigin())
716 return false;
718 // TODO(qinmin): The url might be redirected when android media player
719 // requests the stream. As a result, we cannot guarantee there is only
720 // a single origin. Only if the HTTP request was made without credentials
721 // will we honor the return value from HasSingleSecurityOriginInternal()
722 // in pre-L Android versions.
723 // Check http://crbug.com/334204.
724 if (!allow_stored_credentials_)
725 return true;
727 return base::android::BuildInfo::GetInstance()->sdk_int() >=
728 kSDKVersionToSupportSecurityOriginCheck;
731 bool WebMediaPlayerAndroid::didPassCORSAccessCheck() const {
732 DCHECK(main_thread_checker_.CalledOnValidThread());
733 if (info_loader_)
734 return info_loader_->DidPassCORSAccessCheck();
735 return false;
738 double WebMediaPlayerAndroid::mediaTimeForTimeValue(double timeValue) const {
739 return base::TimeDelta::FromSecondsD(timeValue).InSecondsF();
742 unsigned WebMediaPlayerAndroid::decodedFrameCount() const {
743 if (media_source_delegate_)
744 return media_source_delegate_->DecodedFrameCount();
745 NOTIMPLEMENTED();
746 return 0;
749 unsigned WebMediaPlayerAndroid::droppedFrameCount() const {
750 if (media_source_delegate_)
751 return media_source_delegate_->DroppedFrameCount();
752 NOTIMPLEMENTED();
753 return 0;
756 unsigned WebMediaPlayerAndroid::audioDecodedByteCount() const {
757 if (media_source_delegate_)
758 return media_source_delegate_->AudioDecodedByteCount();
759 NOTIMPLEMENTED();
760 return 0;
763 unsigned WebMediaPlayerAndroid::videoDecodedByteCount() const {
764 if (media_source_delegate_)
765 return media_source_delegate_->VideoDecodedByteCount();
766 NOTIMPLEMENTED();
767 return 0;
770 void WebMediaPlayerAndroid::OnMediaMetadataChanged(
771 base::TimeDelta duration, int width, int height, bool success) {
772 DCHECK(main_thread_checker_.CalledOnValidThread());
773 bool need_to_signal_duration_changed = false;
775 if (is_local_resource_)
776 UpdateNetworkState(WebMediaPlayer::NetworkStateLoaded);
778 // For infinite HLS streams, the reported duration may be zero.
779 // See http://crbug.com/501213.
780 if (duration.is_zero() && IsHLSStream())
781 duration = media::kInfiniteDuration();
783 // Update duration, if necessary, prior to ready state updates that may
784 // cause duration() query.
785 if (!ignore_metadata_duration_change_ && duration_ != duration) {
786 duration_ = duration;
787 if (is_local_resource_)
788 buffered_[0].end = duration_.InSecondsF();
789 // Client readyState transition from HAVE_NOTHING to HAVE_METADATA
790 // already triggers a durationchanged event. If this is a different
791 // transition, remember to signal durationchanged.
792 // Do not ever signal durationchanged on metadata change in MSE case
793 // because OnDurationChanged() handles this.
794 if (ready_state_ > WebMediaPlayer::ReadyStateHaveNothing &&
795 player_type_ != MEDIA_PLAYER_TYPE_MEDIA_SOURCE) {
796 need_to_signal_duration_changed = true;
800 if (ready_state_ != WebMediaPlayer::ReadyStateHaveEnoughData) {
801 UpdateReadyState(WebMediaPlayer::ReadyStateHaveMetadata);
802 UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
805 // TODO(wolenetz): Should we just abort early and set network state to an
806 // error if success == false? See http://crbug.com/248399
807 if (success)
808 OnVideoSizeChanged(width, height);
810 if (need_to_signal_duration_changed)
811 client_->durationChanged();
814 void WebMediaPlayerAndroid::OnPlaybackComplete() {
815 // When playback is about to finish, the Android media player often stops
816 // at a time slightly smaller than the duration, so WebKit never learns
817 // that playback has finished. To solve this, we set the current time to
818 // the media duration when OnPlaybackComplete() gets called.
819 interpolator_.SetBounds(duration_, duration_);
820 client_->timeChanged();
822 // If the loop attribute is set, timeChanged() will update the current time
823 // to 0. It will perform a seek to 0. Issue a command to the player to start
824 // playing after seek completes.
825 if (seeking_ && seek_time_ == base::TimeDelta())
826 player_manager_->Start(player_id_);
829 void WebMediaPlayerAndroid::OnBufferingUpdate(int percentage) {
830 buffered_[0].end = duration() * percentage / 100;
831 did_loading_progress_ = true;
834 void WebMediaPlayerAndroid::OnSeekRequest(const base::TimeDelta& time_to_seek) {
835 DCHECK(main_thread_checker_.CalledOnValidThread());
836 client_->requestSeek(time_to_seek.InSecondsF());
839 void WebMediaPlayerAndroid::OnSeekComplete(
840 const base::TimeDelta& current_time) {
841 DCHECK(main_thread_checker_.CalledOnValidThread());
842 seeking_ = false;
843 if (pending_seek_) {
844 pending_seek_ = false;
845 seek(pending_seek_time_.InSecondsF());
846 return;
848 interpolator_.SetBounds(current_time, current_time);
850 UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
852 client_->timeChanged();
855 void WebMediaPlayerAndroid::OnMediaError(int error_type) {
856 switch (error_type) {
857 case MediaPlayerAndroid::MEDIA_ERROR_FORMAT:
858 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError);
859 break;
860 case MediaPlayerAndroid::MEDIA_ERROR_DECODE:
861 UpdateNetworkState(WebMediaPlayer::NetworkStateDecodeError);
862 break;
863 case MediaPlayerAndroid::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
864 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError);
865 break;
866 case MediaPlayerAndroid::MEDIA_ERROR_INVALID_CODE:
867 break;
869 client_->repaint();
872 void WebMediaPlayerAndroid::OnVideoSizeChanged(int width, int height) {
873 DCHECK(main_thread_checker_.CalledOnValidThread());
875 // For HLS streams, a bogus empty size may be reported at first, followed by
876 // the actual size only once playback begins. See http://crbug.com/509972.
877 if (!has_size_info_ && width == 0 && height == 0 && IsHLSStream())
878 return;
880 has_size_info_ = true;
881 if (natural_size_.width == width && natural_size_.height == height)
882 return;
884 #if defined(VIDEO_HOLE)
885 // Use H/W surface for encrypted video.
886 // TODO(qinmin): Change this so that only EME needs the H/W surface
887 if (force_use_overlay_embedded_video_ ||
888 (media_source_delegate_ && media_source_delegate_->IsVideoEncrypted() &&
889 player_manager_->ShouldUseVideoOverlayForEmbeddedEncryptedVideo())) {
890 needs_external_surface_ = true;
891 if (!paused() && !is_fullscreen_)
892 player_manager_->RequestExternalSurface(player_id_, last_computed_rect_);
893 } else if (!stream_texture_proxy_) {
894 // Do deferred stream texture creation finally.
895 SetNeedsEstablishPeer(true);
896 TryCreateStreamTextureProxyIfNeeded();
898 #endif // defined(VIDEO_HOLE)
899 natural_size_.width = width;
900 natural_size_.height = height;
902 // When play() gets called, |natural_size_| may still be empty and
903 // EstablishSurfaceTexturePeer() will not get called. As a result, the video
904 // may play without a surface texture. When we finally get the valid video
905 // size here, we should call EstablishSurfaceTexturePeer() if it has not been
906 // previously called.
907 if (!paused() && needs_establish_peer_)
908 EstablishSurfaceTexturePeer();
910 ReallocateVideoFrame();
912 // For hidden video element (with style "display:none"), ensure the texture
913 // size is set.
914 if (!is_remote_ && cached_stream_texture_size_ != natural_size_) {
915 stream_texture_factory_->SetStreamTextureSize(
916 stream_id_, gfx::Size(natural_size_.width, natural_size_.height));
917 cached_stream_texture_size_ = natural_size_;
920 // Lazily allocate compositing layer.
921 if (!video_weblayer_) {
922 video_weblayer_.reset(new cc_blink::WebLayerImpl(
923 cc::VideoLayer::Create(cc_blink::WebLayerImpl::LayerSettings(), this,
924 media::VIDEO_ROTATION_0)));
925 client_->setWebLayer(video_weblayer_.get());
929 void WebMediaPlayerAndroid::OnTimeUpdate(base::TimeDelta current_timestamp,
930 base::TimeTicks current_time_ticks) {
931 DCHECK(main_thread_checker_.CalledOnValidThread());
933 if (seeking())
934 return;
936 // Compensate current_timestamp for the IPC latency.
937 base::TimeDelta lower_bound =
938 base::TimeTicks::Now() - current_time_ticks + current_timestamp;
939 base::TimeDelta upper_bound = lower_bound;
940 // We should get another time update in about |kTimeUpdateInterval|
941 // milliseconds.
942 if (is_playing_) {
943 upper_bound += base::TimeDelta::FromMilliseconds(
944 media::kTimeUpdateInterval);
946 // if the lower_bound is smaller than the current time, just use the current
947 // time so that the timer is always progressing.
948 lower_bound =
949 std::max(lower_bound, base::TimeDelta::FromSecondsD(currentTime()));
950 if (lower_bound > upper_bound)
951 upper_bound = lower_bound;
952 interpolator_.SetBounds(lower_bound, upper_bound);
955 void WebMediaPlayerAndroid::OnConnectedToRemoteDevice(
956 const std::string& remote_playback_message) {
957 DCHECK(main_thread_checker_.CalledOnValidThread());
958 DCHECK(!media_source_delegate_);
959 DrawRemotePlaybackText(remote_playback_message);
960 is_remote_ = true;
961 SetNeedsEstablishPeer(false);
962 client_->connectedToRemoteDevice();
965 void WebMediaPlayerAndroid::OnDisconnectedFromRemoteDevice() {
966 DCHECK(main_thread_checker_.CalledOnValidThread());
967 DCHECK(!media_source_delegate_);
968 SetNeedsEstablishPeer(true);
969 if (!paused())
970 EstablishSurfaceTexturePeer();
971 is_remote_ = false;
972 ReallocateVideoFrame();
973 client_->disconnectedFromRemoteDevice();
976 void WebMediaPlayerAndroid::OnDidExitFullscreen() {
977 // |needs_external_surface_| is always false on non-TV devices.
978 if (!needs_external_surface_)
979 SetNeedsEstablishPeer(true);
980 // We had the fullscreen surface connected to Android MediaPlayer,
981 // so reconnect our surface texture for embedded playback.
982 if (!paused() && needs_establish_peer_) {
983 TryCreateStreamTextureProxyIfNeeded();
984 EstablishSurfaceTexturePeer();
987 #if defined(VIDEO_HOLE)
988 if (!paused() && needs_external_surface_)
989 player_manager_->RequestExternalSurface(player_id_, last_computed_rect_);
990 #endif // defined(VIDEO_HOLE)
991 is_fullscreen_ = false;
992 client_->repaint();
995 void WebMediaPlayerAndroid::OnMediaPlayerPlay() {
996 UpdatePlayingState(true);
997 client_->playbackStateChanged();
1000 void WebMediaPlayerAndroid::OnMediaPlayerPause() {
1001 UpdatePlayingState(false);
1002 client_->playbackStateChanged();
1005 void WebMediaPlayerAndroid::OnRemoteRouteAvailabilityChanged(
1006 bool routes_available) {
1007 client_->remoteRouteAvailabilityChanged(routes_available);
1010 void WebMediaPlayerAndroid::OnDurationChanged(const base::TimeDelta& duration) {
1011 DCHECK(main_thread_checker_.CalledOnValidThread());
1012 // Only MSE |player_type_| registers this callback.
1013 DCHECK_EQ(player_type_, MEDIA_PLAYER_TYPE_MEDIA_SOURCE);
1015 // Cache the new duration value and trust it over any subsequent duration
1016 // values received in OnMediaMetadataChanged().
1017 duration_ = duration;
1018 ignore_metadata_duration_change_ = true;
1020 // Notify MediaPlayerClient that duration has changed, if > HAVE_NOTHING.
1021 if (ready_state_ > WebMediaPlayer::ReadyStateHaveNothing)
1022 client_->durationChanged();
1025 void WebMediaPlayerAndroid::UpdateNetworkState(
1026 WebMediaPlayer::NetworkState state) {
1027 DCHECK(main_thread_checker_.CalledOnValidThread());
1028 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing &&
1029 (state == WebMediaPlayer::NetworkStateNetworkError ||
1030 state == WebMediaPlayer::NetworkStateDecodeError)) {
1031 // Any error that occurs before reaching ReadyStateHaveMetadata should
1032 // be considered a format error.
1033 network_state_ = WebMediaPlayer::NetworkStateFormatError;
1034 } else {
1035 network_state_ = state;
1037 client_->networkStateChanged();
1040 void WebMediaPlayerAndroid::UpdateReadyState(
1041 WebMediaPlayer::ReadyState state) {
1042 ready_state_ = state;
1043 client_->readyStateChanged();
1046 void WebMediaPlayerAndroid::OnPlayerReleased() {
1047 // |needs_external_surface_| is always false on non-TV devices.
1048 if (!needs_external_surface_)
1049 needs_establish_peer_ = true;
1051 if (is_playing_)
1052 OnMediaPlayerPause();
1054 #if defined(VIDEO_HOLE)
1055 last_computed_rect_ = gfx::RectF();
1056 #endif // defined(VIDEO_HOLE)
1059 void WebMediaPlayerAndroid::ReleaseMediaResources() {
1060 switch (network_state_) {
1061 // Pause the media player and inform WebKit if the player is in good
1062 // shape.
1063 case WebMediaPlayer::NetworkStateIdle:
1064 case WebMediaPlayer::NetworkStateLoading:
1065 case WebMediaPlayer::NetworkStateLoaded:
1066 Pause(false);
1067 client_->playbackStateChanged();
1068 break;
1069 // If a WebMediaPlayer instance has entered one of these states,
1070 // the internal network state in HTMLMediaElement could be set to empty.
1071 // Calling playbackStateChanged() could then get this object deleted.
1072 case WebMediaPlayer::NetworkStateEmpty:
1073 case WebMediaPlayer::NetworkStateFormatError:
1074 case WebMediaPlayer::NetworkStateNetworkError:
1075 case WebMediaPlayer::NetworkStateDecodeError:
1076 break;
1078 player_manager_->ReleaseResources(player_id_);
1079 if (!needs_external_surface_)
1080 SetNeedsEstablishPeer(true);
1083 void WebMediaPlayerAndroid::OnDestruct() {
1084 NOTREACHED() << "WebMediaPlayer should be destroyed before any "
1085 "RenderFrameObserver::OnDestruct() gets called when "
1086 "the RenderFrame goes away.";
1089 void WebMediaPlayerAndroid::InitializePlayer(
1090 const GURL& url,
1091 const GURL& first_party_for_cookies,
1092 bool allow_stored_credentials,
1093 int demuxer_client_id) {
1094 ReportHLSMetrics();
1096 allow_stored_credentials_ = allow_stored_credentials;
1097 player_manager_->Initialize(
1098 player_type_, player_id_, url, first_party_for_cookies, demuxer_client_id,
1099 frame_->document().url(), allow_stored_credentials);
1100 is_player_initialized_ = true;
1102 if (is_fullscreen_)
1103 player_manager_->EnterFullscreen(player_id_);
1105 if (cdm_context_)
1106 SetCdmInternal(base::Bind(&media::IgnoreCdmAttached));
1109 void WebMediaPlayerAndroid::Pause(bool is_media_related_action) {
1110 player_manager_->Pause(player_id_, is_media_related_action);
1111 UpdatePlayingState(false);
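// Renders |remote_playback_message| into an SkBitmap, uploads it to a GL
// texture, and installs it as the current video frame so the message is
// displayed while playback happens on the remote device.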
1114 void WebMediaPlayerAndroid::DrawRemotePlaybackText(
1115 const std::string& remote_playback_message) {
1116 DCHECK(main_thread_checker_.CalledOnValidThread());
1117 if (!video_weblayer_)
1118 return;
1120 // TODO(johnme): Should redraw this frame if the layer bounds change; but
1121 // there seems to be no easy way to listen for the layer resizing (as opposed to
1122 // OnVideoSizeChanged, which is when the frame sizes of the video file
1123 // change). Perhaps have to poll (on main thread of course)?
1124 gfx::Size video_size_css_px = video_weblayer_->bounds();
1125 float device_scale_factor = frame_->view()->deviceScaleFactor();
1126 // canvas_size will be the size in device pixels when pageScaleFactor == 1
1127 gfx::Size canvas_size(
1128 static_cast<int>(video_size_css_px.width() * device_scale_factor),
1129 static_cast<int>(video_size_css_px.height() * device_scale_factor));
1131 SkBitmap bitmap;
1132 bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height());
1134 // Create the canvas and draw the "Casting to <Chromecast>" text on it.
1135 SkCanvas canvas(bitmap);
1136 canvas.drawColor(SK_ColorBLACK);
1138 const SkScalar kTextSize(40);
1139 const SkScalar kMinPadding(40);
1141 SkPaint paint;
1142 paint.setAntiAlias(true);
1143 paint.setFilterQuality(kHigh_SkFilterQuality);
1144 paint.setColor(SK_ColorWHITE);
1145 paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold));
1146 paint.setTextSize(kTextSize);
1148 // Calculate the vertical margin from the top
1149 SkPaint::FontMetrics font_metrics;
1150 paint.getFontMetrics(&font_metrics);
1151 SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent;
1153 // Measure the width of the entire text to display
1154 size_t display_text_width = paint.measureText(
1155 remote_playback_message.c_str(), remote_playback_message.size());
1156 std::string display_text(remote_playback_message);
1158 if (display_text_width + (kMinPadding * 2) > canvas_size.width()) {
1159 // The text is too long to fit in one line, truncate it and append ellipsis
1160 // to the end.
1162 // First, figure out how much of the canvas the '...' will take up.
1163 const std::string kTruncationEllipsis("\xE2\x80\xA6");
1164 SkScalar sk_ellipse_width = paint.measureText(
1165 kTruncationEllipsis.c_str(), kTruncationEllipsis.size());
1167 // Then calculate how much of the text can be drawn with the '...' appended
1168 // to the end of the string.
1169 SkScalar sk_max_original_text_width(
1170 canvas_size.width() - (kMinPadding * 2) - sk_ellipse_width);
1171 size_t sk_max_original_text_length = paint.breakText(
1172 remote_playback_message.c_str(),
1173 remote_playback_message.size(),
1174 sk_max_original_text_width);
1176 // Remove the part of the string that doesn't fit and append '...'.
1177 display_text.erase(sk_max_original_text_length,
1178 remote_playback_message.size() - sk_max_original_text_length);
1179 display_text.append(kTruncationEllipsis);
1180 display_text_width = paint.measureText(
1181 display_text.c_str(), display_text.size());
1184 // Center the text horizontally.
1185 SkScalar sk_horizontal_margin =
1186 (canvas_size.width() - display_text_width) / 2.0;
1187 canvas.drawText(display_text.c_str(),
1188 display_text.size(),
1189 sk_horizontal_margin,
1190 sk_vertical_margin,
1191 paint);
1193 GLES2Interface* gl = stream_texture_factory_->ContextGL();
1194 GLuint remote_playback_texture_id = 0;
1195 gl->GenTextures(1, &remote_playback_texture_id);
1196 GLuint texture_target = GL_TEXTURE_2D;
1197 gl->BindTexture(texture_target, remote_playback_texture_id);
1198 gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
1199 gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
1200 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
1201 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
1204 SkAutoLockPixels lock(bitmap);
1205 gl->TexImage2D(texture_target,
1206 0 /* level */,
1207 GL_RGBA /* internalformat */,
1208 bitmap.width(),
1209 bitmap.height(),
1210 0 /* border */,
1211 GL_RGBA /* format */,
1212 GL_UNSIGNED_BYTE /* type */,
1213 bitmap.getPixels());
1216 gpu::Mailbox texture_mailbox;
1217 gl->GenMailboxCHROMIUM(texture_mailbox.name);
1218 gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name);
1219 gl->Flush();
1220 GLuint texture_mailbox_sync_point = gl->InsertSyncPointCHROMIUM();
1222 scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
1223 media::PIXEL_FORMAT_ARGB,
1224 gpu::MailboxHolder(texture_mailbox, texture_target,
1225 texture_mailbox_sync_point),
1226 media::BindToCurrentLoop(base::Bind(&OnReleaseTexture,
1227 stream_texture_factory_,
1228 remote_playback_texture_id)),
1229 canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */,
1230 canvas_size /* natural_size */, base::TimeDelta() /* timestamp */);
1231 SetCurrentFrameInternal(new_frame);
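// Rebuilds |current_frame_|: a hole-punch frame when an external surface is
// in use (VIDEO_HOLE), or a frame wrapping the stream texture mailbox
// otherwise.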
1234 void WebMediaPlayerAndroid::ReallocateVideoFrame() {
1235 DCHECK(main_thread_checker_.CalledOnValidThread());
1236 if (needs_external_surface_) {
1237 // VideoFrame::CreateHoleFrame is only defined under VIDEO_HOLE.
1238 #if defined(VIDEO_HOLE)
1239 if (!natural_size_.isEmpty()) {
1240 // Now we finally know that "stream texture" and "video frame" won't
1241 // be needed. EME uses "external surface" and "video hole" instead.
1242 RemoveSurfaceTextureAndProxy();
1243 scoped_refptr<VideoFrame> new_frame =
1244 VideoFrame::CreateHoleFrame(natural_size_);
1245 SetCurrentFrameInternal(new_frame);
1246 // Force the client to grab the hole frame.
1247 client_->repaint();
1249 #else
1250 NOTIMPLEMENTED() << "Hole punching not supported without VIDEO_HOLE flag";
1251 #endif // defined(VIDEO_HOLE)
1252 } else if (!is_remote_ && texture_id_) {
1253 GLES2Interface* gl = stream_texture_factory_->ContextGL();
1254 GLuint texture_target = kGLTextureExternalOES;
1255 GLuint texture_id_ref = gl->CreateAndConsumeTextureCHROMIUM(
1256 texture_target, texture_mailbox_.name);
1257 gl->Flush();
1258 GLuint texture_mailbox_sync_point = gl->InsertSyncPointCHROMIUM();
1260 scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
1261 media::PIXEL_FORMAT_ARGB,
1262 gpu::MailboxHolder(texture_mailbox_, texture_target,
1263 texture_mailbox_sync_point),
1264 media::BindToCurrentLoop(base::Bind(
1265 &OnReleaseTexture, stream_texture_factory_, texture_id_ref)),
1266 natural_size_, gfx::Rect(natural_size_), natural_size_,
1267 base::TimeDelta());
1268 SetCurrentFrameInternal(new_frame);
1272 void WebMediaPlayerAndroid::SetVideoFrameProviderClient(
1273 cc::VideoFrameProvider::Client* client) {
1274 // This is called from both the main renderer thread and the compositor
1275 // thread (when the main thread is blocked).
1277 // Set the callback target when a frame is produced. Need to do this before
1278 // StopUsingProvider to ensure we really stop using the client.
1279 if (stream_texture_proxy_) {
1280 stream_texture_proxy_->BindToLoop(stream_id_, client,
1281 compositor_task_runner_);
1284 if (video_frame_provider_client_ && video_frame_provider_client_ != client)
1285 video_frame_provider_client_->StopUsingProvider();
1286 video_frame_provider_client_ = client;
1289 void WebMediaPlayerAndroid::SetCurrentFrameInternal(
1290 scoped_refptr<media::VideoFrame>& video_frame) {
1291 DCHECK(main_thread_checker_.CalledOnValidThread());
1292 base::AutoLock auto_lock(current_frame_lock_);
1293 current_frame_ = video_frame;
1296 bool WebMediaPlayerAndroid::UpdateCurrentFrame(base::TimeTicks deadline_min,
1297 base::TimeTicks deadline_max) {
1298 NOTIMPLEMENTED();
1299 return false;
1302 bool WebMediaPlayerAndroid::HasCurrentFrame() {
1303 base::AutoLock auto_lock(current_frame_lock_);
1304 return current_frame_;
1307 scoped_refptr<media::VideoFrame> WebMediaPlayerAndroid::GetCurrentFrame() {
1308 scoped_refptr<VideoFrame> video_frame;
1310 base::AutoLock auto_lock(current_frame_lock_);
1311 video_frame = current_frame_;
1314 return video_frame;
1317 void WebMediaPlayerAndroid::PutCurrentFrame() {
1320 void WebMediaPlayerAndroid::ResetStreamTextureProxy() {
1321 DCHECK(main_thread_checker_.CalledOnValidThread());
1323 RemoveSurfaceTextureAndProxy();
1325 TryCreateStreamTextureProxyIfNeeded();
1326 if (needs_establish_peer_ && is_playing_)
1327 EstablishSurfaceTexturePeer();
1330 void WebMediaPlayerAndroid::RemoveSurfaceTextureAndProxy() {
1331 DCHECK(main_thread_checker_.CalledOnValidThread());
1333 if (stream_id_) {
1334 GLES2Interface* gl = stream_texture_factory_->ContextGL();
1335 gl->DeleteTextures(1, &texture_id_);
1336 // Flush to ensure that the stream texture gets deleted in a timely fashion.
1337 gl->ShallowFlushCHROMIUM();
1338 texture_id_ = 0;
1339 texture_mailbox_ = gpu::Mailbox();
1340 stream_id_ = 0;
1342 stream_texture_proxy_.reset();
1343 needs_establish_peer_ = !needs_external_surface_ && !is_remote_ &&
1344 !is_fullscreen_ &&
1345 (hasVideo() || IsHLSStream());
1348 void WebMediaPlayerAndroid::TryCreateStreamTextureProxyIfNeeded() {
1349 DCHECK(main_thread_checker_.CalledOnValidThread());
1350 // Already created.
1351 if (stream_texture_proxy_)
1352 return;
1354 // No factory to create proxy.
1355 if (!stream_texture_factory_.get())
1356 return;
1358 // Not needed for hole punching.
1359 if (!needs_establish_peer_)
1360 return;
1362 stream_texture_proxy_.reset(stream_texture_factory_->CreateProxy());
1363 if (stream_texture_proxy_) {
1364 DoCreateStreamTexture();
1365 ReallocateVideoFrame();
1366 if (video_frame_provider_client_) {
1367 stream_texture_proxy_->BindToLoop(
1368 stream_id_, video_frame_provider_client_, compositor_task_runner_);
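// Connects the stream texture to the media player identified by |player_id_|
// and applies any stream texture size change deferred while in remote mode.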
1373 void WebMediaPlayerAndroid::EstablishSurfaceTexturePeer() {
1374 DCHECK(main_thread_checker_.CalledOnValidThread());
1375 if (!stream_texture_proxy_)
1376 return;
1378 if (stream_texture_factory_.get() && stream_id_)
1379 stream_texture_factory_->EstablishPeer(stream_id_, player_id_);
1381 // Set the deferred size because the size was changed in remote mode.
1382 if (!is_remote_ && cached_stream_texture_size_ != natural_size_) {
1383 stream_texture_factory_->SetStreamTextureSize(
1384 stream_id_, gfx::Size(natural_size_.width, natural_size_.height));
1385 cached_stream_texture_size_ = natural_size_;
1388 needs_establish_peer_ = false;
1391 void WebMediaPlayerAndroid::DoCreateStreamTexture() {
1392 DCHECK(main_thread_checker_.CalledOnValidThread());
1393 DCHECK(!stream_id_);
1394 DCHECK(!texture_id_);
1395 stream_id_ = stream_texture_factory_->CreateStreamTexture(
1396 kGLTextureExternalOES, &texture_id_, &texture_mailbox_);
1399 void WebMediaPlayerAndroid::SetNeedsEstablishPeer(bool needs_establish_peer) {
1400 needs_establish_peer_ = needs_establish_peer;
1403 void WebMediaPlayerAndroid::setPoster(const blink::WebURL& poster) {
1404 player_manager_->SetPoster(player_id_, poster);
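// Updates |is_playing_|, starts or stops the time interpolator accordingly,
// and notifies the delegate of the play/pause transition.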
1407 void WebMediaPlayerAndroid::UpdatePlayingState(bool is_playing) {
1408 if (is_playing == is_playing_)
1409 return;
1411 is_playing_ = is_playing;
1413 if (is_playing)
1414 interpolator_.StartInterpolating();
1415 else
1416 interpolator_.StopInterpolating();
1418 if (delegate_) {
1419 if (is_playing)
1420 delegate_->DidPlay(this);
1421 else
1422 delegate_->DidPause(this);
1426 #if defined(VIDEO_HOLE)
1427 bool WebMediaPlayerAndroid::UpdateBoundaryRectangle() {
1428 if (!video_weblayer_)
1429 return false;
1431 // Compute the geometry of video frame layer.
1432 cc::Layer* layer = video_weblayer_->layer();
1433 gfx::RectF rect(layer->bounds());
1434 while (layer) {
1435 rect.Offset(layer->position().OffsetFromOrigin());
1436 layer = layer->parent();
1439 // Return false when the geometry hasn't been changed from the last time.
1440 if (last_computed_rect_ == rect)
1441 return false;
1443 // Store the changed geometry information when it is actually changed.
1444 last_computed_rect_ = rect;
1445 return true;
1448 const gfx::RectF WebMediaPlayerAndroid::GetBoundaryRectangle() {
1449 return last_computed_rect_;
1451 #endif
1453 // The following EME related code is copied from WebMediaPlayerImpl.
1454 // TODO(xhwang): Remove duplicate code between WebMediaPlayerAndroid and
1455 // WebMediaPlayerImpl.
1457 // Convert a WebString to ASCII, falling back on an empty string in the case
1458 // of a non-ASCII string.
1459 static std::string ToASCIIOrEmpty(const blink::WebString& string) {
1460 return base::IsStringASCII(string)
1461 ? base::UTF16ToASCII(base::StringPiece16(string))
1462 : std::string();
1465 // Helper functions to report media EME related stats to UMA. They follow the
1466 // convention of more commonly used macros UMA_HISTOGRAM_ENUMERATION and
1467 // UMA_HISTOGRAM_COUNTS. The reason that we cannot use those macros directly is
1468 // that UMA_* macros require the names to be constant throughout the process'
1469 // lifetime.
1471 static void EmeUMAHistogramEnumeration(const std::string& key_system,
1472 const std::string& method,
1473 int sample,
1474 int boundary_value) {
1475 base::LinearHistogram::FactoryGet(
1476 kMediaEme + media::GetKeySystemNameForUMA(key_system) + "." + method,
1477 1, boundary_value, boundary_value + 1,
1478 base::Histogram::kUmaTargetedHistogramFlag)->Add(sample);
1481 static void EmeUMAHistogramCounts(const std::string& key_system,
1482 const std::string& method,
1483 int sample) {
1484 // Use the same parameters as UMA_HISTOGRAM_COUNTS.
1485 base::Histogram::FactoryGet(
1486 kMediaEme + media::GetKeySystemNameForUMA(key_system) + "." + method,
1487 1, 1000000, 50, base::Histogram::kUmaTargetedHistogramFlag)->Add(sample);
1490 // Helper enum for reporting generateKeyRequest/addKey histograms.
1491 enum MediaKeyException {
1492 kUnknownResultId,
1493 kSuccess,
1494 kKeySystemNotSupported,
1495 kInvalidPlayerState,
1496 kMaxMediaKeyException
1499 static MediaKeyException MediaKeyExceptionForUMA(
1500 WebMediaPlayer::MediaKeyException e) {
1501 switch (e) {
1502 case WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported:
1503 return kKeySystemNotSupported;
1504 case WebMediaPlayer::MediaKeyExceptionInvalidPlayerState:
1505 return kInvalidPlayerState;
1506 case WebMediaPlayer::MediaKeyExceptionNoError:
1507 return kSuccess;
1508 default:
1509 return kUnknownResultId;
1513 // Helper for converting |key_system| name and exception |e| to a pair of enum
1514 // values from above, for reporting to UMA.
1515 static void ReportMediaKeyExceptionToUMA(const std::string& method,
1516 const std::string& key_system,
1517 WebMediaPlayer::MediaKeyException e) {
1518 MediaKeyException result_id = MediaKeyExceptionForUMA(e);
1519 DCHECK_NE(result_id, kUnknownResultId) << e;
1520 EmeUMAHistogramEnumeration(
1521 key_system, method, result_id, kMaxMediaKeyException);
1524 bool WebMediaPlayerAndroid::IsKeySystemSupported(
1525 const std::string& key_system) {
1526 // On Android, EME only works with MSE.
1527 return player_type_ == MEDIA_PLAYER_TYPE_MEDIA_SOURCE &&
1528 media::PrefixedIsSupportedConcreteKeySystem(key_system);
1531 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::generateKeyRequest(
1532 const WebString& key_system,
1533 const unsigned char* init_data,
1534 unsigned init_data_length) {
1535 DVLOG(1) << "generateKeyRequest: " << base::string16(key_system) << ": "
1536 << std::string(reinterpret_cast<const char*>(init_data),
1537 static_cast<size_t>(init_data_length));
1539 std::string ascii_key_system =
1540 media::GetUnprefixedKeySystemName(ToASCIIOrEmpty(key_system));
1542 WebMediaPlayer::MediaKeyException e =
1543 GenerateKeyRequestInternal(ascii_key_system, init_data, init_data_length);
1544 ReportMediaKeyExceptionToUMA("generateKeyRequest", ascii_key_system, e);
1545 return e;
1548 // Guess the type of |init_data|. This is only used to handle some corner cases
1549 // so we keep it as simple as possible without breaking major use cases.
1550 static media::EmeInitDataType GuessInitDataType(const unsigned char* init_data,
1551 unsigned init_data_length) {
1552 // Most WebM files use KeyId of 16 bytes. CENC init data is always >16 bytes.
1553 if (init_data_length == 16)
1554 return media::EmeInitDataType::WEBM;
1556 return media::EmeInitDataType::CENC;
1559 // TODO(xhwang): Report an error when there is encrypted stream but EME is
1560 // not enabled. Currently the player just doesn't start and waits
1561 // forever.
1562 WebMediaPlayer::MediaKeyException
1563 WebMediaPlayerAndroid::GenerateKeyRequestInternal(
1564 const std::string& key_system,
1565 const unsigned char* init_data,
1566 unsigned init_data_length) {
1567 if (!IsKeySystemSupported(key_system))
1568 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1570 if (!proxy_decryptor_) {
1571 DCHECK(current_key_system_.empty());
1572 proxy_decryptor_.reset(new media::ProxyDecryptor(
1573 media_permission_,
1574 player_manager_->ShouldUseVideoOverlayForEmbeddedEncryptedVideo(),
1575 base::Bind(&WebMediaPlayerAndroid::OnKeyAdded,
1576 weak_factory_.GetWeakPtr()),
1577 base::Bind(&WebMediaPlayerAndroid::OnKeyError,
1578 weak_factory_.GetWeakPtr()),
1579 base::Bind(&WebMediaPlayerAndroid::OnKeyMessage,
1580 weak_factory_.GetWeakPtr())));
1582 GURL security_origin(frame_->document().securityOrigin().toString());
1583 proxy_decryptor_->CreateCdm(
1584 cdm_factory_, key_system, security_origin,
1585 base::Bind(&WebMediaPlayerAndroid::OnCdmContextReady,
1586 weak_factory_.GetWeakPtr()));
1587 current_key_system_ = key_system;
1588 }
1590 // We do not support run-time switching between key systems for now.
1591 DCHECK(!current_key_system_.empty());
1592 if (key_system != current_key_system_)
1593 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1595 media::EmeInitDataType init_data_type = init_data_type_;
1596 if (init_data_type == media::EmeInitDataType::UNKNOWN)
1597 init_data_type = GuessInitDataType(init_data, init_data_length);
1599 proxy_decryptor_->GenerateKeyRequest(init_data_type, init_data,
1600 init_data_length);
1602 return WebMediaPlayer::MediaKeyExceptionNoError;
1603 }
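// Rough flow for the prefixed path, assuming a supported key system: the
// first generateKeyRequest() call creates the ProxyDecryptor and starts CDM
// creation; OnCdmContextReady() later attaches the CDM to the player via
// SetCdmInternal(). Key messages, added keys and key errors from the CDM are
// then routed through OnKeyMessage(), OnKeyAdded() and OnKeyError() below,
// which forward them to |encrypted_client_| as prefixed EME events.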
1605 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::addKey(
1606 const WebString& key_system,
1607 const unsigned char* key,
1608 unsigned key_length,
1609 const unsigned char* init_data,
1610 unsigned init_data_length,
1611 const WebString& session_id) {
1612 DVLOG(1) << "addKey: " << base::string16(key_system) << ": "
1613 << std::string(reinterpret_cast<const char*>(key),
1614 static_cast<size_t>(key_length)) << ", "
1615 << std::string(reinterpret_cast<const char*>(init_data),
1616 static_cast<size_t>(init_data_length)) << " ["
1617 << base::string16(session_id) << "]";
1619 std::string ascii_key_system =
1620 media::GetUnprefixedKeySystemName(ToASCIIOrEmpty(key_system));
1621 std::string ascii_session_id = ToASCIIOrEmpty(session_id);
1623 WebMediaPlayer::MediaKeyException e = AddKeyInternal(ascii_key_system,
1624 key,
1625 key_length,
1626 init_data,
1627 init_data_length,
1628 ascii_session_id);
1629 ReportMediaKeyExceptionToUMA("addKey", ascii_key_system, e);
1630 return e;
1631 }
1633 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::AddKeyInternal(
1634 const std::string& key_system,
1635 const unsigned char* key,
1636 unsigned key_length,
1637 const unsigned char* init_data,
1638 unsigned init_data_length,
1639 const std::string& session_id) {
1640 DCHECK(key);
1641 DCHECK_GT(key_length, 0u);
1643 if (!IsKeySystemSupported(key_system))
1644 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1646 if (current_key_system_.empty() || key_system != current_key_system_)
1647 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1649 proxy_decryptor_->AddKey(
1650 key, key_length, init_data, init_data_length, session_id);
1651 return WebMediaPlayer::MediaKeyExceptionNoError;
1652 }
1654 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::cancelKeyRequest(
1655 const WebString& key_system,
1656 const WebString& session_id) {
1657 DVLOG(1) << "cancelKeyRequest: " << base::string16(key_system) << ": "
1658 << " [" << base::string16(session_id) << "]";
1660 std::string ascii_key_system =
1661 media::GetUnprefixedKeySystemName(ToASCIIOrEmpty(key_system));
1662 std::string ascii_session_id = ToASCIIOrEmpty(session_id);
1664 WebMediaPlayer::MediaKeyException e =
1665 CancelKeyRequestInternal(ascii_key_system, ascii_session_id);
1666 ReportMediaKeyExceptionToUMA("cancelKeyRequest", ascii_key_system, e);
1667 return e;
1668 }
1670 WebMediaPlayer::MediaKeyException
1671 WebMediaPlayerAndroid::CancelKeyRequestInternal(const std::string& key_system,
1672 const std::string& session_id) {
1673 if (!IsKeySystemSupported(key_system))
1674 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1676 if (current_key_system_.empty() || key_system != current_key_system_)
1677 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1679 proxy_decryptor_->CancelKeyRequest(session_id);
1680 return WebMediaPlayer::MediaKeyExceptionNoError;
1681 }
1683 void WebMediaPlayerAndroid::setContentDecryptionModule(
1684 blink::WebContentDecryptionModule* cdm,
1685 blink::WebContentDecryptionModuleResult result) {
1686 DCHECK(main_thread_checker_.CalledOnValidThread());
1688 // Once the CDM is set it can't be cleared as there may be frames being
1689 // decrypted on other threads. So fail this request.
1690 // http://crbug.com/462365#c7.
1691 if (!cdm) {
1692 result.completeWithError(
1693 blink::WebContentDecryptionModuleExceptionInvalidStateError, 0,
1694 "The existing MediaKeys object cannot be removed at this time.");
1695 return;
1696 }
1698 cdm_context_ = media::ToWebContentDecryptionModuleImpl(cdm)->GetCdmContext();
1700 if (is_player_initialized_) {
1701 SetCdmInternal(media::BindToCurrentLoop(
1702 base::Bind(&WebMediaPlayerAndroid::ContentDecryptionModuleAttached,
1703 weak_factory_.GetWeakPtr(), result)));
1704 } else {
1705 // No pipeline/decoder connected, so resolve the promise. When something
1706 // is connected, setting the CDM will happen in SetDecryptorReadyCB().
1707 ContentDecryptionModuleAttached(result, true);
1708 }
1709 }
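// setContentDecryptionModule() above is the unprefixed EME path (a MediaKeys
// object attached from script), as opposed to the prefixed
// generateKeyRequest()/addKey() path earlier in this file; once the player is
// initialized, both paths end up attaching the CDM through SetCdmInternal().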
1711 void WebMediaPlayerAndroid::ContentDecryptionModuleAttached(
1712 blink::WebContentDecryptionModuleResult result,
1713 bool success) {
1714 if (success) {
1715 result.complete();
1716 return;
1717 }
1719 result.completeWithError(
1720 blink::WebContentDecryptionModuleExceptionNotSupportedError,
1721 0,
1722 "Unable to set MediaKeys object");
1723 }
1725 void WebMediaPlayerAndroid::OnKeyAdded(const std::string& session_id) {
1726 EmeUMAHistogramCounts(current_key_system_, "KeyAdded", 1);
1728 encrypted_client_->keyAdded(
1729 WebString::fromUTF8(media::GetPrefixedKeySystemName(current_key_system_)),
1730 WebString::fromUTF8(session_id));
1731 }
1733 void WebMediaPlayerAndroid::OnKeyError(const std::string& session_id,
1734 media::MediaKeys::KeyError error_code,
1735 uint32 system_code) {
1736 EmeUMAHistogramEnumeration(current_key_system_, "KeyError",
1737 error_code, media::MediaKeys::kMaxKeyError);
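// |system_code| is a 32-bit value from the CDM, but keyError() takes an
// unsigned short system code, so out-of-range values are clamped to the
// maximum representable value below and a warning is logged.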
1739 unsigned short short_system_code = 0;
1740 if (system_code > std::numeric_limits<unsigned short>::max()) {
1741 LOG(WARNING) << "system_code exceeds unsigned short limit.";
1742 short_system_code = std::numeric_limits<unsigned short>::max();
1743 } else {
1744 short_system_code = static_cast<unsigned short>(system_code);
1745 }
1747 encrypted_client_->keyError(
1748 WebString::fromUTF8(media::GetPrefixedKeySystemName(current_key_system_)),
1749 WebString::fromUTF8(session_id),
1750 static_cast<blink::WebMediaPlayerEncryptedMediaClient::MediaKeyErrorCode>(
1751 error_code),
1752 short_system_code);
1753 }
1755 void WebMediaPlayerAndroid::OnKeyMessage(const std::string& session_id,
1756 const std::vector<uint8>& message,
1757 const GURL& destination_url) {
1758 DCHECK(destination_url.is_empty() || destination_url.is_valid());
1760 encrypted_client_->keyMessage(
1761 WebString::fromUTF8(media::GetPrefixedKeySystemName(current_key_system_)),
1762 WebString::fromUTF8(session_id), message.empty() ? NULL : &message[0],
1763 message.size(), destination_url);
1764 }
1766 void WebMediaPlayerAndroid::OnMediaSourceOpened(
1767 blink::WebMediaSource* web_media_source) {
1768 client_->mediaSourceOpened(web_media_source);
1769 }
1771 void WebMediaPlayerAndroid::OnEncryptedMediaInitData(
1772 media::EmeInitDataType init_data_type,
1773 const std::vector<uint8>& init_data) {
1774 DCHECK(main_thread_checker_.CalledOnValidThread());
1776 // Do not fire NeedKey event if encrypted media is not enabled.
1777 if (!blink::WebRuntimeFeatures::isPrefixedEncryptedMediaEnabled() &&
1778 !blink::WebRuntimeFeatures::isEncryptedMediaEnabled()) {
1779 return;
1780 }
1782 UMA_HISTOGRAM_COUNTS(kMediaEme + std::string("NeedKey"), 1);
1784 DCHECK(init_data_type != media::EmeInitDataType::UNKNOWN);
1785 DLOG_IF(WARNING, init_data_type_ != media::EmeInitDataType::UNKNOWN &&
1786 init_data_type != init_data_type_)
1787 << "Mixed init data type not supported. The new type is ignored.";
1788 if (init_data_type_ == media::EmeInitDataType::UNKNOWN)
1789 init_data_type_ = init_data_type;
1791 encrypted_client_->encrypted(ConvertToWebInitDataType(init_data_type),
1792 vector_as_array(&init_data), init_data.size());
1793 }
1795 void WebMediaPlayerAndroid::OnWaitingForDecryptionKey() {
1796 encrypted_client_->didBlockPlaybackWaitingForKey();
1798 // TODO(jrummell): didResumePlaybackBlockedForKey() should only be called
1799 // when a key has been successfully added (e.g. OnSessionKeysChange() with
1800 // |has_additional_usable_key| = true). http://crbug.com/461903
1801 encrypted_client_->didResumePlaybackBlockedForKey();
1802 }
1804 void WebMediaPlayerAndroid::OnCdmContextReady(media::CdmContext* cdm_context) {
1805 DCHECK(!cdm_context_);
1807 if (!cdm_context) {
1808 LOG(ERROR) << "CdmContext not available (e.g. CDM creation failed).";
1809 return;
1810 }
1812 cdm_context_ = cdm_context;
1814 if (is_player_initialized_)
1815 SetCdmInternal(base::Bind(&media::IgnoreCdmAttached));
1816 }
1818 void WebMediaPlayerAndroid::SetCdmInternal(
1819 const media::CdmAttachedCB& cdm_attached_cb) {
1820 DCHECK(cdm_context_ && is_player_initialized_);
1821 DCHECK(cdm_context_->GetDecryptor() ||
1822 cdm_context_->GetCdmId() != media::CdmContext::kInvalidCdmId)
1823 << "CDM should support either a Decryptor or a CDM ID.";
1825 media::Decryptor* decryptor = cdm_context_->GetDecryptor();
1827 // Note:
1828 // - If |decryptor| is non-null, this only runs |decryptor_ready_cb_| and
1829 // ignores the CDM ID.
1830 // - If |decryptor| is null (in which case the CDM ID should be valid), this
1831 // returns any pending |decryptor_ready_cb_| with null so that
1832 // MediaSourceDelegate falls back to a browser-side (IPC-based) CDM, and
1833 // then calls SetCdm() through |player_manager_|.
1835 if (decryptor) {
1836 if (!decryptor_ready_cb_.is_null()) {
1837 base::ResetAndReturn(&decryptor_ready_cb_)
1838 .Run(decryptor, cdm_attached_cb);
1839 } else {
1840 cdm_attached_cb.Run(true);
1841 }
1842 return;
1843 }
1845 // |decryptor| is null.
1846 if (!decryptor_ready_cb_.is_null()) {
1847 base::ResetAndReturn(&decryptor_ready_cb_)
1848 .Run(nullptr, base::Bind(&media::IgnoreCdmAttached));
1849 }
1851 DCHECK(cdm_context_->GetCdmId() != media::CdmContext::kInvalidCdmId);
1852 player_manager_->SetCdm(player_id_, cdm_context_->GetCdmId());
1853 cdm_attached_cb.Run(true);
1854 }
1856 void WebMediaPlayerAndroid::SetDecryptorReadyCB(
1857 const media::DecryptorReadyCB& decryptor_ready_cb) {
1858 DCHECK(main_thread_checker_.CalledOnValidThread());
1859 DCHECK(is_player_initialized_);
1861 // Cancels the previous decryptor request.
1862 if (decryptor_ready_cb.is_null()) {
1863 if (!decryptor_ready_cb_.is_null()) {
1864 base::ResetAndReturn(&decryptor_ready_cb_)
1865 .Run(NULL, base::Bind(&media::IgnoreCdmAttached));
1866 }
1867 return;
1868 }
1870 // TODO(xhwang): Support multiple decryptor notification requests (e.g. from
1871 // video and audio). The current implementation is fine for the existing
1872 // media pipeline since audio and video decoders are initialized in sequence,
1873 // but WebMediaPlayerImpl should not depend on the media pipeline's
1874 // implementation details.
1875 DCHECK(decryptor_ready_cb_.is_null());
1877 if (cdm_context_) {
1878 decryptor_ready_cb.Run(cdm_context_->GetDecryptor(),
1879 base::Bind(&media::IgnoreCdmAttached));
1880 return;
1881 }
1883 decryptor_ready_cb_ = decryptor_ready_cb;
1884 }
1886 bool WebMediaPlayerAndroid::supportsOverlayFullscreenVideo() {
1887 return true;
1888 }
1890 void WebMediaPlayerAndroid::enterFullscreen() {
1891 if (is_player_initialized_)
1892 player_manager_->EnterFullscreen(player_id_);
1893 SetNeedsEstablishPeer(false);
1894 is_fullscreen_ = true;
1895 }
1897 bool WebMediaPlayerAndroid::IsHLSStream() const {
1898 std::string mime;
1899 GURL url = redirected_url_.is_empty() ? url_ : redirected_url_;
1900 if (!net::GetMimeTypeFromFile(base::FilePath(url.path()), &mime))
1901 return false;
1902 return !mime.compare("application/x-mpegurl");
1903 }
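// Example of the check above: for a manifest URL like
// https://example.com/live/master.m3u8 (a hypothetical URL), the ".m3u8"
// extension typically maps to the "application/x-mpegurl" MIME type, so
// IsHLSStream() returns true and ReportHLSMetrics() below records the
// playback as HLS.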
1905 void WebMediaPlayerAndroid::ReportHLSMetrics() const {
1906 if (player_type_ != MEDIA_PLAYER_TYPE_URL)
1907 return;
1909 bool is_hls = IsHLSStream();
1910 UMA_HISTOGRAM_BOOLEAN("Media.Android.IsHttpLiveStreamingMedia", is_hls);
1911 if (is_hls) {
1912 media::RecordOriginOfHLSPlayback(
1913 GURL(frame_->document().securityOrigin().toString()));
1914 }
1915 }
1917 } // namespace content