// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/media/webmediaplayer_impl.h"

#include <limits>
#include <string>

#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/debug/alias.h"
#include "base/debug/crash_logging.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
#include "content/public/common/content_switches.h"
#include "content/public/renderer/render_frame.h"
#include "content/renderer/media/buffered_data_source.h"
#include "content/renderer/media/crypto/encrypted_media_player_support.h"
#include "content/renderer/media/render_media_log.h"
#include "content/renderer/media/texttrack_impl.h"
#include "content/renderer/media/webaudiosourceprovider_impl.h"
#include "content/renderer/media/webinbandtexttrack_impl.h"
#include "content/renderer/media/webmediaplayer_delegate.h"
#include "content/renderer/media/webmediaplayer_params.h"
#include "content/renderer/media/webmediaplayer_util.h"
#include "content/renderer/media/webmediasource_impl.h"
#include "content/renderer/render_thread_impl.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/audio/null_audio_sink.h"
#include "media/base/audio_hardware_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/filter_collection.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline.h"
#include "media/base/text_renderer.h"
#include "media/base/video_frame.h"
#include "media/filters/audio_renderer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "media/filters/ffmpeg_audio_decoder.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/filters/gpu_video_decoder.h"
#include "media/filters/opus_audio_decoder.h"
#include "media/filters/renderer_impl.h"
#include "media/filters/video_renderer_impl.h"
#include "media/filters/vpx_video_decoder.h"
#include "third_party/WebKit/public/platform/WebMediaSource.h"
#include "third_party/WebKit/public/platform/WebRect.h"
#include "third_party/WebKit/public/platform/WebSize.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebSecurityOrigin.h"
#include "third_party/WebKit/public/web/WebView.h"
using blink::WebCanvas;
using blink::WebMediaPlayer;
using blink::WebString;
using media::PipelineStatus;

namespace content {

// Limits the range of playback rate.
//
// TODO(kylep): Revisit these.
//
// Vista has substantially lower performance than XP or Windows7. If you speed
// up a video too much, it can't keep up, and rendering stops updating except
// on the time bar. For really high speeds, audio becomes a bottleneck and we
// just use up the data we have, which may not achieve the speed requested,
// but will not crash the tab.
//
// A very slow speed, i.e. 0.00000001x, causes the machine to lock up (it seems
// like a busy loop). It gets unresponsive, although it's not completely dead.
//
// Also, our timers are not very accurate (especially for ogg), which becomes
// evident at low speeds and on Vista. Since other speeds are risky and outside
// the norms, we think 1/16x to 16x is a safe and useful range for now.
const double kMinRate = 0.0625;
const double kMaxRate = 16.0;
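
// Adapter that lets a media::VideoFrame publish and wait on GL sync points
// through the destination blink::WebGraphicsContext3D. It is used by
// copyVideoTextureToPlatformTexture() below to hand a release sync point back
// to the frame once the copy has been issued.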
class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(
      blink::WebGraphicsContext3D* web_graphics_context)
      : web_graphics_context_(web_graphics_context) {}
  virtual ~SyncPointClientImpl() {}
  virtual uint32 InsertSyncPoint() OVERRIDE {
    return web_graphics_context_->insertSyncPoint();
  }
  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
    web_graphics_context_->waitSyncPoint(sync_point);
  }

 private:
  blink::WebGraphicsContext3D* web_graphics_context_;
};
class BufferedDataSourceHostImpl;
#define COMPILE_ASSERT_MATCHING_ENUM(name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \
                 static_cast<int>(BufferedResourceLoader::k ## name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(Unspecified);
COMPILE_ASSERT_MATCHING_ENUM(Anonymous);
COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#undef COMPILE_ASSERT_MATCHING_ENUM
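
// The BIND_TO_RENDER_LOOP macros below wrap a WebMediaPlayerImpl member
// function in a callback that is bound to a weak pointer (so it is dropped if
// the player has been destroyed) and trampolined back onto the main render
// task runner via media::BindToCurrentLoop, so pipeline callbacks that
// originate on the media thread always run on the main thread.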
#define BIND_TO_RENDER_LOOP(function) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr())))

#define BIND_TO_RENDER_LOOP1(function, arg1) \
  (DCHECK(main_task_runner_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))
static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log,
                                const std::string& error) {
  media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}
WebMediaPlayerImpl::WebMediaPlayerImpl(
    blink::WebLocalFrame* frame,
    blink::WebMediaPlayerClient* client,
    base::WeakPtr<WebMediaPlayerDelegate> delegate,
    const WebMediaPlayerParams& params)
    : frame_(frame),
      network_state_(WebMediaPlayer::NetworkStateEmpty),
      ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
      preload_(AUTO),
      main_task_runner_(base::MessageLoopProxy::current()),
      media_task_runner_(
          RenderThreadImpl::current()->GetMediaThreadTaskRunner()),
      media_log_(new RenderMediaLog()),
      pipeline_(media_task_runner_, media_log_.get()),
      load_type_(LoadTypeURL),
      opaque_(false),
      paused_(true),
      seeking_(false),
      playback_rate_(0.0f),
      pending_seek_(false),
      pending_seek_seconds_(0.0f),
      should_notify_time_changed_(false),
      client_(client),
      delegate_(delegate),
      defer_load_cb_(params.defer_load_cb()),
      gpu_factories_(RenderThreadImpl::current()->GetGpuFactories()),
      supports_save_(true),
      chunk_demuxer_(NULL),
      // Threaded compositing isn't enabled universally yet.
      compositor_task_runner_(
          RenderThreadImpl::current()->compositor_message_loop_proxy()
              ? RenderThreadImpl::current()->compositor_message_loop_proxy()
              : base::MessageLoopProxy::current()),
      compositor_(new VideoFrameCompositor(
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnNaturalSizeChanged),
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnOpacityChanged))),
      text_track_index_(0),
      encrypted_media_support_(EncryptedMediaPlayerSupport::Create(client)) {
  DCHECK(encrypted_media_support_);

  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED));

  // |gpu_factories_| requires that its entry points be called on its
  // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
  // factories, require that their message loops are identical.
  DCHECK(!gpu_factories_.get() ||
         (gpu_factories_->GetTaskRunner() == media_task_runner_.get()));

  // Use the null sink if no sink was provided.
  audio_source_provider_ = new WebAudioSourceProviderImpl(
      params.audio_renderer_sink().get()
          ? params.audio_renderer_sink()
          : new media::NullAudioSink(media_task_runner_));
}
WebMediaPlayerImpl::~WebMediaPlayerImpl() {
  client_->setWebLayer(NULL);

  DCHECK(main_task_runner_->BelongsToCurrentThread());
  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));

  if (delegate_)
    delegate_->PlayerGone(this);

  // Abort any pending IO so stopping the pipeline doesn't get blocked.
  if (data_source_)
    data_source_->Abort();
  if (chunk_demuxer_) {
    chunk_demuxer_->Shutdown();
    chunk_demuxer_ = NULL;
  }

  gpu_factories_ = NULL;

  // Make sure to kill the pipeline so there are no more media threads running.
  // Note: stopping the pipeline might block for a long time.
  base::WaitableEvent waiter(false, false);
  pipeline_.Stop(
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
  waiter.Wait();

  compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);
}
void WebMediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url,
                              CORSMode cors_mode) {
  DVLOG(1) << __FUNCTION__ << "(" << load_type << ", " << url << ", "
           << cors_mode << ")";
  if (!defer_load_cb_.is_null()) {
    defer_load_cb_.Run(base::Bind(
        &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode));
    return;
  }
  DoLoad(load_type, url, cors_mode);
}
void WebMediaPlayerImpl::DoLoad(LoadType load_type,
                                const blink::WebURL& url,
                                CORSMode cors_mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  GURL gurl(url);
  ReportMediaSchemeUma(gurl);

  // Set subresource URL for crash reporting.
  base::debug::SetCrashKeyValue("subresource_url", gurl.spec());

  load_type_ = load_type;

  SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  SetReadyState(WebMediaPlayer::ReadyStateHaveNothing);
  media_log_->AddEvent(media_log_->CreateLoadEvent(url.spec()));

  // Media source pipelines can start immediately.
  if (load_type == LoadTypeMediaSource) {
    supports_save_ = false;
    StartPipeline();
    return;
  }

  // Otherwise it's a regular request which requires resolving the URL first.
  data_source_.reset(new BufferedDataSource(
      url,
      static_cast<BufferedResourceLoader::CORSMode>(cors_mode),
      frame_,
      media_log_.get(),
      &buffered_data_source_host_,
      base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr())));
  data_source_->Initialize(
      base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr()));
  data_source_->SetPreload(preload_);
}
void WebMediaPlayerImpl::play() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = false;
  pipeline_.SetPlaybackRate(playback_rate_);
  if (data_source_)
    data_source_->MediaIsPlaying();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY));

  if (delegate_)
    delegate_->DidPlay(this);
}
void WebMediaPlayerImpl::pause() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  paused_ = true;
  pipeline_.SetPlaybackRate(0.0f);
  if (data_source_)
    data_source_->MediaIsPaused();
  paused_time_ = pipeline_.GetMediaTime();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));

  if (delegate_)
    delegate_->DidPause(this);
}
bool WebMediaPlayerImpl::supportsSave() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return supports_save_;
}
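
// Seeks are serialized: if a new seek arrives while one is already in flight,
// only the latest target is remembered in |pending_seek_seconds_| and it is
// re-issued from OnPipelineSeeked() once the current seek completes.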
void WebMediaPlayerImpl::seek(double seconds) {
  DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata)
    SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  base::TimeDelta seek_time = ConvertSecondsToTimestamp(seconds);

  if (seeking_) {
    pending_seek_ = true;
    pending_seek_seconds_ = seconds;
    if (chunk_demuxer_)
      chunk_demuxer_->CancelPendingSeek(seek_time);
    return;
  }

  media_log_->AddEvent(media_log_->CreateSeekEvent(seconds));

  // Update our paused time.
  if (paused_)
    paused_time_ = seek_time;

  seeking_ = true;

  if (chunk_demuxer_)
    chunk_demuxer_->StartWaitingForSeek(seek_time);

  // Kick off the asynchronous seek!
  pipeline_.Seek(
      seek_time,
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, true));
}
void WebMediaPlayerImpl::setRate(double rate) {
  DVLOG(1) << __FUNCTION__ << "(" << rate << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // TODO(kylep): Remove when support for negatives is added. Also, modify the
  // following checks so rewind uses reasonable values as well.
  if (rate < 0.0)
    return;

  // Limit rates to reasonable values by clamping.
  if (rate != 0.0) {
    if (rate < kMinRate)
      rate = kMinRate;
    else if (rate > kMaxRate)
      rate = kMaxRate;
  }

  playback_rate_ = rate;
  if (!paused_) {
    pipeline_.SetPlaybackRate(rate);
    if (data_source_)
      data_source_->MediaPlaybackRateChanged(rate);
  }
}
void WebMediaPlayerImpl::setVolume(double volume) {
  DVLOG(1) << __FUNCTION__ << "(" << volume << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  pipeline_.SetVolume(volume);
}
#define COMPILE_ASSERT_MATCHING_ENUM(webkit_name, chromium_name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::webkit_name) == \
                 static_cast<int>(content::chromium_name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(PreloadNone, NONE);
COMPILE_ASSERT_MATCHING_ENUM(PreloadMetaData, METADATA);
COMPILE_ASSERT_MATCHING_ENUM(PreloadAuto, AUTO);
#undef COMPILE_ASSERT_MATCHING_ENUM
void WebMediaPlayerImpl::setPreload(WebMediaPlayer::Preload preload) {
  DVLOG(1) << __FUNCTION__ << "(" << preload << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  preload_ = static_cast<content::Preload>(preload);
  if (data_source_)
    data_source_->SetPreload(preload_);
}
bool WebMediaPlayerImpl::hasVideo() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_video;
}

bool WebMediaPlayerImpl::hasAudio() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_metadata_.has_audio;
}

blink::WebSize WebMediaPlayerImpl::naturalSize() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return blink::WebSize(pipeline_metadata_.natural_size);
}

bool WebMediaPlayerImpl::paused() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return pipeline_.GetPlaybackRate() == 0.0f;
}

bool WebMediaPlayerImpl::seeking() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return false;

  return seeking_;
}

double WebMediaPlayerImpl::duration() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return std::numeric_limits<double>::quiet_NaN();

  return GetPipelineDuration();
}

double WebMediaPlayerImpl::timelineOffset() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (pipeline_metadata_.timeline_offset.is_null())
    return std::numeric_limits<double>::quiet_NaN();

  return pipeline_metadata_.timeline_offset.ToJsTime();
}

double WebMediaPlayerImpl::currentTime() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF();
}
WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return network_state_;
}

WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return ready_state_;
}
blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::Ranges<base::TimeDelta> buffered_time_ranges =
      pipeline_.GetBufferedTimeRanges();

  const base::TimeDelta duration = pipeline_.GetMediaDuration();
  if (duration != media::kInfiniteDuration()) {
    buffered_data_source_host_.AddBufferedTimeRanges(
        &buffered_time_ranges, duration);
  }
  return ConvertToWebTimeRanges(buffered_time_ranges);
}
double WebMediaPlayerImpl::maxTimeSeekable() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // If we haven't even gotten to ReadyStateHaveMetadata yet then just
  // return 0 so that the seekable range is empty.
  if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
    return 0.0;

  // We don't support seeking in streaming media.
  if (data_source_ && data_source_->IsStreaming())
    return 0.0;
  return duration();
}
bool WebMediaPlayerImpl::didLoadingProgress() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  bool pipeline_progress = pipeline_.DidLoadingProgress();
  bool data_progress = buffered_data_source_host_.DidLoadingProgress();
  return pipeline_progress || data_progress;
}
void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha) {
  paint(canvas, rect, alpha, SkXfermode::kSrcOver_Mode);
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha,
                               SkXfermode::Mode mode) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");

  // TODO(scherkus): Clarify paint() API contract to better understand when and
  // why it's being called. For example, today paint() is called when:
  //   - We haven't reached HAVE_CURRENT_DATA and need to paint black
  //   - We're painting to a canvas
  // See http://crbug.com/341225 http://crbug.com/342621 for details.
  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  gfx::Rect gfx_rect(rect);

  skcanvas_video_renderer_.Paint(video_frame.get(),
                                 canvas,
                                 gfx_rect,
                                 alpha,
                                 mode,
                                 pipeline_metadata_.video_rotation);
}
bool WebMediaPlayerImpl::hasSingleSecurityOrigin() const {
  if (data_source_)
    return data_source_->HasSingleOrigin();
  return true;
}

bool WebMediaPlayerImpl::didPassCORSAccessCheck() const {
  if (data_source_)
    return data_source_->DidPassCORSAccessCheck();
  return false;
}

double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
  return ConvertSecondsToTimestamp(timeValue).InSecondsF();
}
unsigned WebMediaPlayerImpl::decodedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_decoded;
}

unsigned WebMediaPlayerImpl::droppedFrameCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_dropped;
}

unsigned WebMediaPlayerImpl::audioDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.audio_bytes_decoded;
}

unsigned WebMediaPlayerImpl::videoDecodedByteCount() const {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_bytes_decoded;
}
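
// Copies the current video frame into a caller-provided GL texture. The frame
// must be backed by a GL_TEXTURE_2D mailbox: the copy waits on the producer's
// sync point, consumes the mailbox into a temporary texture, blits it with
// copyTextureCHROMIUM(), restores the pixel-store state it changed, deletes
// the temporary texture, and finally publishes a release sync point back to
// the frame via SyncPointClientImpl.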
bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
    blink::WebGraphicsContext3D* web_graphics_context,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");

  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  if (!video_frame.get())
    return false;
  if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
    return false;

  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  if (mailbox_holder->texture_target != GL_TEXTURE_2D)
    return false;

  web_graphics_context->waitSyncPoint(mailbox_holder->sync_point);
  uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM(
      GL_TEXTURE_2D, mailbox_holder->mailbox.name);

  // The video is stored in an unmultiplied format, so premultiply if
  // requested.
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    premultiply_alpha);
  // The application itself needs to take care of setting the right |flip_y|
  // value to get the expected result: flip_y == true means to reverse the
  // video orientation, while flip_y == false means to keep the intrinsic
  // orientation.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D,
                                            source_texture,
                                            texture,
                                            level,
                                            internal_format,
                                            type);
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    false);

  web_graphics_context->deleteTexture(source_texture);
  web_graphics_context->flush();

  SyncPointClientImpl client(web_graphics_context);
  video_frame->UpdateReleaseSyncPoint(&client);
  return true;
}
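
// The prefixed EME entry points (generateKeyRequest / addKey /
// cancelKeyRequest) and the CDM attachment methods below simply delegate to
// |encrypted_media_support_|; no key handling happens in this class.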
WebMediaPlayer::MediaKeyException
WebMediaPlayerImpl::generateKeyRequest(const WebString& key_system,
                                       const unsigned char* init_data,
                                       unsigned init_data_length) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->GenerateKeyRequest(
      frame_, key_system, init_data, init_data_length);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::addKey(
    const WebString& key_system,
    const unsigned char* key,
    unsigned key_length,
    const unsigned char* init_data,
    unsigned init_data_length,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->AddKey(
      key_system, key, key_length, init_data, init_data_length, session_id);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::cancelKeyRequest(
    const WebString& key_system,
    const WebString& session_id) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  return encrypted_media_support_->CancelKeyRequest(key_system, session_id);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm,
    blink::WebContentDecryptionModuleResult result) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm, result);
}

void WebMediaPlayerImpl::setContentDecryptionModuleSync(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModuleSync(cdm);
}
void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
                                          PipelineStatus status) {
  DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  seeking_ = false;
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_seconds_);
    return;
  }

  if (status != media::PIPELINE_OK) {
    OnPipelineError(status);
    return;
  }

  // Update our paused time.
  if (paused_)
    paused_time_ = pipeline_.GetMediaTime();

  should_notify_time_changed_ = time_changed;
}
void WebMediaPlayerImpl::OnPipelineEnded() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->timeChanged();
}
void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(error, media::PIPELINE_OK);

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
    // Any error that occurs before reaching ReadyStateHaveMetadata should
    // be considered a format error.
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  SetNetworkState(PipelineErrorToNetworkState(error));

  if (error == media::PIPELINE_ERROR_DECRYPT)
    encrypted_media_support_->OnPipelineDecryptError();
}
void WebMediaPlayerImpl::OnPipelineMetadata(
    media::PipelineMetadata metadata) {
  DVLOG(1) << __FUNCTION__;

  pipeline_metadata_ = metadata;

  UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
                            metadata.video_rotation,
                            media::VIDEO_ROTATION_MAX + 1);
  SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  if (hasVideo()) {
    DCHECK(!video_weblayer_);
    scoped_refptr<cc::VideoLayer> layer =
        cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);

    if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 ||
        pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) {
      gfx::Size size = pipeline_metadata_.natural_size;
      pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
    }

    video_weblayer_.reset(new cc_blink::WebLayerImpl(layer));
    video_weblayer_->setOpaque(opaque_);
    client_->setWebLayer(video_weblayer_.get());
  }
}
void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
    media::BufferingState buffering_state) {
  DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";

  // Ignore buffering state changes until we've completed all outstanding
  // seeks.
  if (seeking_ || pending_seek_)
    return;

  // TODO(scherkus): Handle other buffering states when Pipeline starts using
  // them and translate them into ready state changes. http://crbug.com/144683
  DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH);
  SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  // Blink expects a timeChanged() in response to a seek().
  if (should_notify_time_changed_)
    client_->timeChanged();
}
void WebMediaPlayerImpl::OnDemuxerOpened() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  client_->mediaSourceOpened(new WebMediaSourceImpl(
      chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_)));
}
void WebMediaPlayerImpl::OnAddTextTrack(
    const media::TextTrackConfig& config,
    const media::AddTextTrackDoneCB& done_cb) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  const WebInbandTextTrackImpl::Kind web_kind =
      static_cast<WebInbandTextTrackImpl::Kind>(config.kind());
  const blink::WebString web_label =
      blink::WebString::fromUTF8(config.label());
  const blink::WebString web_language =
      blink::WebString::fromUTF8(config.language());
  const blink::WebString web_id =
      blink::WebString::fromUTF8(config.id());

  scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track(
      new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id,
                                 text_track_index_++));

  scoped_ptr<media::TextTrack> text_track(new TextTrackImpl(
      main_task_runner_, client_, web_inband_text_track.Pass()));

  done_cb.Run(text_track.Pass());
}
void WebMediaPlayerImpl::DataSourceInitialized(bool success) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (!success) {
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  StartPipeline();
}
void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
  if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateIdle);
  else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle)
    SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  media_log_->AddEvent(
      media_log_->CreateBooleanEvent(
          media::MediaLogEvent::NETWORK_ACTIVITY_SET,
          "is_downloading_data", is_downloading));
}
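
// The decoder lists below are built in priority order: GPU-accelerated
// decoding is tried first when |gpu_factories_| is available, then libvpx
// (unless it has been compiled out), and finally FFmpeg as the software
// fallback.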
// TODO(xhwang): Move this to a factory class so that we can create different
// media::Renderer implementations.
scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
  media::SetDecryptorReadyCB set_decryptor_ready_cb =
      encrypted_media_support_->CreateSetDecryptorReadyCB();

  // Create our audio decoders and renderer.
  ScopedVector<media::AudioDecoder> audio_decoders;

  media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_);
  audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_,
                                                         log_cb));
  audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_));

  scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl(
      media_task_runner_,
      audio_source_provider_.get(),
      audio_decoders.Pass(),
      set_decryptor_ready_cb,
      RenderThreadImpl::current()->GetAudioHardwareConfig()));

  // Create our video decoders and renderer.
  ScopedVector<media::VideoDecoder> video_decoders;

  if (gpu_factories_.get()) {
    video_decoders.push_back(
        new media::GpuVideoDecoder(gpu_factories_, media_log_));
  }

#if !defined(MEDIA_DISABLE_LIBVPX)
  video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_));
#endif  // !defined(MEDIA_DISABLE_LIBVPX)

  video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_));

  scoped_ptr<media::VideoRenderer> video_renderer(
      new media::VideoRendererImpl(
          media_task_runner_,
          video_decoders.Pass(),
          set_decryptor_ready_cb,
          base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)),
          true,
          media_log_));

  return scoped_ptr<media::Renderer>(new media::RendererImpl(
      media_task_runner_,
      demuxer_.get(),
      audio_renderer.Pass(),
      video_renderer.Pass()));
}
void WebMediaPlayerImpl::StartPipeline() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();

  // Keep track of whether this is an MSE or non-MSE playback.
  UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
                        (load_type_ == LoadTypeMediaSource));

  media::LogCB mse_log_cb;
  media::Demuxer::NeedKeyCB need_key_cb =
      encrypted_media_support_->CreateNeedKeyCB();

  // Figure out which demuxer to use.
  if (load_type_ != LoadTypeMediaSource) {
    DCHECK(!chunk_demuxer_);
    DCHECK(data_source_);

    demuxer_.reset(new media::FFmpegDemuxer(
        media_task_runner_, data_source_.get(),
        need_key_cb,
        media_log_));
  } else {
    DCHECK(!chunk_demuxer_);
    DCHECK(!data_source_);

    mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);

    chunk_demuxer_ = new media::ChunkDemuxer(
        BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
        need_key_cb,
        mse_log_cb,
        true);
    demuxer_.reset(chunk_demuxer_);
  }

  scoped_ptr<media::FilterCollection> filter_collection(
      new media::FilterCollection());
  filter_collection->SetDemuxer(demuxer_.get());
  filter_collection->SetRenderer(CreateRenderer());

  if (cmd_line->HasSwitch(switches::kEnableInbandTextTracks)) {
    scoped_ptr<media::TextRenderer> text_renderer(
        new media::TextRenderer(
            media_task_runner_,
            BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnAddTextTrack)));

    filter_collection->SetTextRenderer(text_renderer.Pass());
  }

  // ... and we're ready to go!
  seeking_ = true;
  pipeline_.Start(
      filter_collection.Pass(),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineEnded),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineError),
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, false),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineMetadata),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineBufferingStateChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDurationChanged));
}
void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  network_state_ = state;
  // Always notify to ensure the client has the latest value.
  client_->networkStateChanged();
}
void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  if (state == WebMediaPlayer::ReadyStateHaveEnoughData && data_source_ &&
      data_source_->assume_fully_buffered() &&
      network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateLoaded);

  ready_state_ = state;
  // Always notify to ensure the client has the latest value.
  client_->readyStateChanged();
}
blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}
double WebMediaPlayerImpl::GetPipelineDuration() const {
  base::TimeDelta duration = pipeline_.GetMediaDuration();

  // Return positive infinity if the resource is unbounded.
  // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration
  if (duration == media::kInfiniteDuration())
    return std::numeric_limits<double>::infinity();

  return duration.InSecondsF();
}
void WebMediaPlayerImpl::OnDurationChanged() {
  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return;

  client_->durationChanged();
}
void WebMediaPlayerImpl::OnNaturalSizeChanged(gfx::Size size) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);
  TRACE_EVENT0("media", "WebMediaPlayerImpl::OnNaturalSizeChanged");

  media_log_->AddEvent(
      media_log_->CreateVideoSizeSetEvent(size.width(), size.height()));
  pipeline_metadata_.natural_size = size;

  client_->sizeChanged();
}
void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  opaque_ = opaque;
  if (video_weblayer_)
    video_weblayer_->setOpaque(opaque_);
}
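
// FrameReady() is the paint callback handed to the video renderer in
// CreateRenderer(): each decoded frame that should be displayed is forwarded
// to |compositor_| on the compositor task runner, which is why
// GetCurrentFrameFromCompositor() below has to hop threads to read it back.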
void WebMediaPlayerImpl::FrameReady(
    const scoped_refptr<media::VideoFrame>& frame) {
  compositor_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoFrameCompositor::UpdateCurrentFrame,
                 base::Unretained(compositor_),
                 frame));
}
static void GetCurrentFrameAndSignal(
    VideoFrameCompositor* compositor,
    scoped_refptr<media::VideoFrame>* video_frame_out,
    base::WaitableEvent* event) {
  TRACE_EVENT0("media", "GetCurrentFrameAndSignal");
  *video_frame_out = compositor->GetCurrentFrame();
  event->Signal();
}
scoped_refptr<media::VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
  TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
  if (compositor_task_runner_->BelongsToCurrentThread())
    return compositor_->GetCurrentFrame();

  // Use a posted task and waitable event instead of a lock, otherwise
  // WebGL/Canvas can see different content than what the compositor is
  // seeing.
  scoped_refptr<media::VideoFrame> video_frame;
  base::WaitableEvent event(false, false);
  compositor_task_runner_->PostTask(FROM_HERE,
                                    base::Bind(&GetCurrentFrameAndSignal,
                                               base::Unretained(compositor_),
                                               &video_frame,
                                               &event));
  event.Wait();
  return video_frame;
}
}  // namespace content