// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webmediaplayer_impl.h"

#include <algorithm>
#include <limits>
#include <string>
#include <vector>

#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/debug/alias.h"
#include "base/debug/crash_logging.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/synchronization/waitable_event.h"
#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
#include "content/public/common/content_switches.h"
#include "content/public/renderer/render_frame.h"
#include "content/renderer/media/buffered_data_source.h"
#include "content/renderer/media/crypto/encrypted_media_player_support.h"
#include "content/renderer/media/render_media_log.h"
#include "content/renderer/media/texttrack_impl.h"
#include "content/renderer/media/webaudiosourceprovider_impl.h"
#include "content/renderer/media/webinbandtexttrack_impl.h"
#include "content/renderer/media/webmediaplayer_delegate.h"
#include "content/renderer/media/webmediaplayer_params.h"
#include "content/renderer/media/webmediaplayer_util.h"
#include "content/renderer/media/webmediasource_impl.h"
#include "content/renderer/render_thread_impl.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/audio/null_audio_sink.h"
#include "media/base/audio_hardware_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/filter_collection.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline.h"
#include "media/base/text_renderer.h"
#include "media/base/video_frame.h"
#include "media/filters/audio_renderer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "media/filters/ffmpeg_audio_decoder.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/filters/gpu_video_decoder.h"
#include "media/filters/opus_audio_decoder.h"
#include "media/filters/renderer_impl.h"
#include "media/filters/video_renderer_impl.h"
#include "media/filters/vpx_video_decoder.h"
#include "third_party/WebKit/public/platform/WebMediaSource.h"
#include "third_party/WebKit/public/platform/WebRect.h"
#include "third_party/WebKit/public/platform/WebSize.h"
#include "third_party/WebKit/public/platform/WebString.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebSecurityOrigin.h"
#include "third_party/WebKit/public/web/WebView.h"
#include "v8/include/v8.h"

using blink::WebCanvas;
using blink::WebMediaPlayer;
using blink::WebRect;
using blink::WebSize;
using blink::WebString;
using media::PipelineStatus;
namespace {

// Amount of extra memory used by each player instance reported to V8.
// It is not an exact number -- first, it differs on different platforms,
// and second, it is very hard to calculate. Instead, use some arbitrary
// value that will cause garbage collection from time to time. We don't want
// it to happen on every allocation, but don't want 5k players to sit in memory
// either. The chosen constant appears to achieve both goals, at least for
// audio objects. (Do not worry about video objects yet; JS programs do not
// create thousands of them...)
const int kPlayerExtraMemory = 1024 * 1024;

// Limits the range of playback rate.

// TODO(kylep): Revisit these.

// Vista has substantially lower performance than XP or Windows7. If you speed
// up a video too much, it can't keep up, and rendering stops updating except
// on the time bar. For really high speeds, audio becomes a bottleneck and we
// just use up the data we have, which may not achieve the speed requested, but
// will not crash the tab.

// A very slow speed, e.g. 0.00000001x, causes the machine to lock up (it seems
// like a busy loop). It becomes unresponsive, although it's not completely
// dead.

// Also our timers are not very accurate (especially for ogg), which becomes
// evident at low speeds and on Vista. Since other speeds are risky and outside
// the norms, we think 1/16x to 16x is a safe and useful range for now.
const double kMinRate = 0.0625;
const double kMaxRate = 16.0;
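
// Adapts a blink::WebGraphicsContext3D to the
// media::VideoFrame::SyncPointClient interface; used below in
// copyVideoTextureToPlatformTexture() to update the frame's release sync
// point after the GL copy commands have been issued.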
class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(
      blink::WebGraphicsContext3D* web_graphics_context)
      : web_graphics_context_(web_graphics_context) {}
  virtual ~SyncPointClientImpl() {}
  virtual uint32 InsertSyncPoint() OVERRIDE {
    return web_graphics_context_->insertSyncPoint();
  }
  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
    web_graphics_context_->waitSyncPoint(sync_point);
  }

 private:
  blink::WebGraphicsContext3D* web_graphics_context_;
};

}  // namespace
namespace content {

class BufferedDataSourceHostImpl;

#define COMPILE_ASSERT_MATCHING_ENUM(name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::CORSMode ## name) == \
                 static_cast<int>(BufferedResourceLoader::k ## name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(Unspecified);
COMPILE_ASSERT_MATCHING_ENUM(Anonymous);
COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#undef COMPILE_ASSERT_MATCHING_ENUM
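
// The macros below bind a WebMediaPlayerImpl member function to a weak
// pointer and trampoline the resulting callback back onto the render (main)
// thread via media::BindToCurrentLoop, so pipeline callbacks fired on other
// threads always run safely on |main_loop_|.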
#define BIND_TO_RENDER_LOOP(function) \
  (DCHECK(main_loop_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr())))

#define BIND_TO_RENDER_LOOP1(function, arg1) \
  (DCHECK(main_loop_->BelongsToCurrentThread()), \
   media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))

static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log,
                                const std::string& error) {
  media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}
WebMediaPlayerImpl::WebMediaPlayerImpl(
    blink::WebLocalFrame* frame,
    blink::WebMediaPlayerClient* client,
    base::WeakPtr<WebMediaPlayerDelegate> delegate,
    const WebMediaPlayerParams& params)
    : frame_(frame),
      network_state_(WebMediaPlayer::NetworkStateEmpty),
      ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
      preload_(AUTO),
      main_loop_(base::MessageLoopProxy::current()),
      media_loop_(
          RenderThreadImpl::current()->GetMediaThreadMessageLoopProxy()),
      media_log_(new RenderMediaLog()),
      pipeline_(media_loop_, media_log_.get()),
      load_type_(LoadTypeURL),
      opaque_(false),
      paused_(true),
      seeking_(false),
      playback_rate_(0.0f),
      pending_seek_(false),
      pending_seek_seconds_(0.0f),
      should_notify_time_changed_(false),
      client_(client),
      delegate_(delegate),
      defer_load_cb_(params.defer_load_cb()),
      incremented_externally_allocated_memory_(false),
      gpu_factories_(RenderThreadImpl::current()->GetGpuFactories()),
      supports_save_(true),
      chunk_demuxer_(NULL),
      // Threaded compositing isn't enabled universally yet.
      compositor_task_runner_(
          RenderThreadImpl::current()->compositor_message_loop_proxy()
              ? RenderThreadImpl::current()->compositor_message_loop_proxy()
              : base::MessageLoopProxy::current()),
      compositor_(new VideoFrameCompositor(
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnNaturalSizeChanged),
          BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnOpacityChanged))),
      text_track_index_(0),
      encrypted_media_support_(EncryptedMediaPlayerSupport::Create(client)) {
  DCHECK(encrypted_media_support_);

  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED));

  // |gpu_factories_| requires that its entry points be called on its
  // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
  // factories, require that their message loops are identical.
  DCHECK(!gpu_factories_ || (gpu_factories_->GetTaskRunner() == media_loop_));

  // Let V8 know we started a new thread if we have not done so yet. This is
  // posted as a separate task to avoid deleting the player while it is still
  // being created. Also, delaying GC until after the player starts gets rid of
  // the starting lag -- collection happens in parallel with playing.

  // TODO(enal): remove when we get rid of the per-audio-stream thread.
  main_loop_->PostTask(
      FROM_HERE,
      base::Bind(&WebMediaPlayerImpl::IncrementExternallyAllocatedMemory,
                 AsWeakPtr()));

  // Use the null sink if no sink was provided.
  audio_source_provider_ = new WebAudioSourceProviderImpl(
      params.audio_renderer_sink().get()
          ? params.audio_renderer_sink()
          : new media::NullAudioSink(media_loop_));
}
WebMediaPlayerImpl::~WebMediaPlayerImpl() {
  client_->setWebLayer(NULL);

  DCHECK(main_loop_->BelongsToCurrentThread());
  media_log_->AddEvent(
      media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));

  if (delegate_.get())
    delegate_->PlayerGone(this);

  // Abort any pending IO so stopping the pipeline doesn't get blocked.
  if (data_source_)
    data_source_->Abort();
  if (chunk_demuxer_) {
    chunk_demuxer_->Shutdown();
    chunk_demuxer_ = NULL;
  }

  gpu_factories_ = NULL;

  // Make sure to kill the pipeline so there are no more media threads running.
  // Note: stopping the pipeline might block for a long time.
  base::WaitableEvent waiter(false, false);
  pipeline_.Stop(
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
  waiter.Wait();

  compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);

  // Let V8 know we are not using extra resources anymore.
  if (incremented_externally_allocated_memory_) {
    v8::Isolate::GetCurrent()->AdjustAmountOfExternalAllocatedMemory(
        -kPlayerExtraMemory);
    incremented_externally_allocated_memory_ = false;
  }
}
void WebMediaPlayerImpl::load(LoadType load_type, const blink::WebURL& url,
                              CORSMode cors_mode) {
  DVLOG(1) << __FUNCTION__ << "(" << load_type << ", " << url << ", "
           << cors_mode << ")";
  if (!defer_load_cb_.is_null()) {
    defer_load_cb_.Run(base::Bind(
        &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode));
    return;
  }
  DoLoad(load_type, url, cors_mode);
}
void WebMediaPlayerImpl::DoLoad(LoadType load_type,
                                const blink::WebURL& url,
                                CORSMode cors_mode) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  GURL gurl(url);
  ReportMediaSchemeUma(gurl);

  // Set subresource URL for crash reporting.
  base::debug::SetCrashKeyValue("subresource_url", gurl.spec());

  load_type_ = load_type;

  SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  SetReadyState(WebMediaPlayer::ReadyStateHaveNothing);
  media_log_->AddEvent(media_log_->CreateLoadEvent(url.spec()));

  // Media source pipelines can start immediately.
  if (load_type == LoadTypeMediaSource) {
    supports_save_ = false;
    StartPipeline();
    return;
  }

  // Otherwise it's a regular request which requires resolving the URL first.
  data_source_.reset(new BufferedDataSource(
      url,
      static_cast<BufferedResourceLoader::CORSMode>(cors_mode),
      main_loop_,
      frame_,
      media_log_.get(),
      &buffered_data_source_host_,
      base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr())));
  data_source_->Initialize(
      base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr()));
  data_source_->SetPreload(preload_);
}
void WebMediaPlayerImpl::play() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_loop_->BelongsToCurrentThread());

  paused_ = false;
  pipeline_.SetPlaybackRate(playback_rate_);
  if (data_source_)
    data_source_->MediaIsPlaying();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY));

  if (delegate_.get())
    delegate_->DidPlay(this);
}
void WebMediaPlayerImpl::pause() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_loop_->BelongsToCurrentThread());

  paused_ = true;
  pipeline_.SetPlaybackRate(0.0f);
  if (data_source_)
    data_source_->MediaIsPaused();
  paused_time_ = pipeline_.GetMediaTime();

  media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));

  if (delegate_.get())
    delegate_->DidPause(this);
}
bool WebMediaPlayerImpl::supportsSave() const {
  DCHECK(main_loop_->BelongsToCurrentThread());
  return supports_save_;
}
void WebMediaPlayerImpl::seek(double seconds) {
  DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata)
    SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  base::TimeDelta seek_time = ConvertSecondsToTimestamp(seconds);

  if (seeking_) {
    pending_seek_ = true;
    pending_seek_seconds_ = seconds;
    if (chunk_demuxer_)
      chunk_demuxer_->CancelPendingSeek(seek_time);
    return;
  }

  media_log_->AddEvent(media_log_->CreateSeekEvent(seconds));

  // Update our paused time.
  if (paused_)
    paused_time_ = seek_time;

  seeking_ = true;

  if (chunk_demuxer_)
    chunk_demuxer_->StartWaitingForSeek(seek_time);

  // Kick off the asynchronous seek!
  pipeline_.Seek(
      seek_time,
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, true));
}
void WebMediaPlayerImpl::setRate(double rate) {
  DVLOG(1) << __FUNCTION__ << "(" << rate << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());

  // TODO(kylep): Remove when support for negatives is added. Also, modify the
  // following checks so rewind uses reasonable values also.
  if (rate < 0.0)
    return;

  // Limit rates to reasonable values by clamping.
  if (rate != 0.0) {
    if (rate < kMinRate)
      rate = kMinRate;
    else if (rate > kMaxRate)
      rate = kMaxRate;
  }

  playback_rate_ = rate;
  if (!paused_) {
    pipeline_.SetPlaybackRate(rate);
    if (data_source_)
      data_source_->MediaPlaybackRateChanged(rate);
  }
}
void WebMediaPlayerImpl::setVolume(double volume) {
  DVLOG(1) << __FUNCTION__ << "(" << volume << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());

  pipeline_.SetVolume(volume);
}

#define COMPILE_ASSERT_MATCHING_ENUM(webkit_name, chromium_name) \
  COMPILE_ASSERT(static_cast<int>(WebMediaPlayer::webkit_name) == \
                 static_cast<int>(content::chromium_name), \
                 mismatching_enums)
COMPILE_ASSERT_MATCHING_ENUM(PreloadNone, NONE);
COMPILE_ASSERT_MATCHING_ENUM(PreloadMetaData, METADATA);
COMPILE_ASSERT_MATCHING_ENUM(PreloadAuto, AUTO);
#undef COMPILE_ASSERT_MATCHING_ENUM

void WebMediaPlayerImpl::setPreload(WebMediaPlayer::Preload preload) {
  DVLOG(1) << __FUNCTION__ << "(" << preload << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());

  preload_ = static_cast<content::Preload>(preload);
  if (data_source_)
    data_source_->SetPreload(preload_);
}
bool WebMediaPlayerImpl::hasVideo() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return pipeline_metadata_.has_video;
}

bool WebMediaPlayerImpl::hasAudio() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return pipeline_metadata_.has_audio;
}

blink::WebSize WebMediaPlayerImpl::naturalSize() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return blink::WebSize(pipeline_metadata_.natural_size);
}

bool WebMediaPlayerImpl::paused() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return pipeline_.GetPlaybackRate() == 0.0f;
}

bool WebMediaPlayerImpl::seeking() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return false;

  return seeking_;
}

double WebMediaPlayerImpl::duration() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return std::numeric_limits<double>::quiet_NaN();

  return GetPipelineDuration();
}

double WebMediaPlayerImpl::timelineOffset() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (pipeline_metadata_.timeline_offset.is_null())
    return std::numeric_limits<double>::quiet_NaN();

  return pipeline_metadata_.timeline_offset.ToJsTime();
}

double WebMediaPlayerImpl::currentTime() const {
  DCHECK(main_loop_->BelongsToCurrentThread());
  return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF();
}

WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const {
  DCHECK(main_loop_->BelongsToCurrentThread());
  return network_state_;
}

WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
  DCHECK(main_loop_->BelongsToCurrentThread());
  return ready_state_;
}

blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  media::Ranges<base::TimeDelta> buffered_time_ranges =
      pipeline_.GetBufferedTimeRanges();

  const base::TimeDelta duration = pipeline_.GetMediaDuration();
  if (duration != media::kInfiniteDuration()) {
    buffered_data_source_host_.AddBufferedTimeRanges(
        &buffered_time_ranges, duration);
  }
  return ConvertToWebTimeRanges(buffered_time_ranges);
}

double WebMediaPlayerImpl::maxTimeSeekable() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  // If we haven't even gotten to ReadyStateHaveMetadata yet then just
  // return 0 so that the seekable range is empty.
  if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
    return 0.0;

  // We don't support seeking in streaming media.
  if (data_source_ && data_source_->IsStreaming())
    return 0.0;
  return duration();
}

bool WebMediaPlayerImpl::didLoadingProgress() {
  DCHECK(main_loop_->BelongsToCurrentThread());
  bool pipeline_progress = pipeline_.DidLoadingProgress();
  bool data_progress = buffered_data_source_host_.DidLoadingProgress();
  return pipeline_progress || data_progress;
}
void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha) {
  paint(canvas, rect, alpha, SkXfermode::kSrcOver_Mode);
}

void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
                               const blink::WebRect& rect,
                               unsigned char alpha,
                               SkXfermode::Mode mode) {
  DCHECK(main_loop_->BelongsToCurrentThread());
  TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");

  // TODO(scherkus): Clarify paint() API contract to better understand when and
  // why it's being called. For example, today paint() is called when:
  //   - We haven't reached HAVE_CURRENT_DATA and need to paint black
  //   - We're painting to a canvas
  // See http://crbug.com/341225 http://crbug.com/342621 for details.
  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  gfx::Rect gfx_rect(rect);

  skcanvas_video_renderer_.Paint(video_frame.get(),
                                 canvas,
                                 gfx_rect,
                                 alpha,
                                 mode,
                                 pipeline_metadata_.video_rotation);
}
bool WebMediaPlayerImpl::hasSingleSecurityOrigin() const {
  if (data_source_)
    return data_source_->HasSingleOrigin();
  return true;
}

bool WebMediaPlayerImpl::didPassCORSAccessCheck() const {
  if (data_source_)
    return data_source_->DidPassCORSAccessCheck();
  return false;
}

double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
  return ConvertSecondsToTimestamp(timeValue).InSecondsF();
}

unsigned WebMediaPlayerImpl::decodedFrameCount() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_decoded;
}

unsigned WebMediaPlayerImpl::droppedFrameCount() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_frames_dropped;
}

unsigned WebMediaPlayerImpl::audioDecodedByteCount() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.audio_bytes_decoded;
}

unsigned WebMediaPlayerImpl::videoDecodedByteCount() const {
  DCHECK(main_loop_->BelongsToCurrentThread());

  media::PipelineStatistics stats = pipeline_.GetStatistics();
  return stats.video_bytes_decoded;
}
bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
    blink::WebGraphicsContext3D* web_graphics_context,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");

  scoped_refptr<media::VideoFrame> video_frame =
      GetCurrentFrameFromCompositor();

  if (!video_frame)
    return false;
  if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
    return false;

  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  if (mailbox_holder->texture_target != GL_TEXTURE_2D)
    return false;

  web_graphics_context->waitSyncPoint(mailbox_holder->sync_point);
  uint32 source_texture = web_graphics_context->createAndConsumeTextureCHROMIUM(
      GL_TEXTURE_2D, mailbox_holder->mailbox.name);

  // The video is stored in an unmultiplied format, so premultiply
  // if necessary.
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    premultiply_alpha);
  // The application needs to pass the right flip_y value to get the expected
  // result: flip_y == true reverses the video orientation, while
  // flip_y == false keeps the intrinsic orientation.
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D,
                                            source_texture,
                                            texture,
                                            level,
                                            internal_format,
                                            type);
  web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
                                    false);

  web_graphics_context->deleteTexture(source_texture);
  web_graphics_context->flush();

  SyncPointClientImpl client(web_graphics_context);
  video_frame->UpdateReleaseSyncPoint(&client);
  return true;
}
WebMediaPlayer::MediaKeyException
WebMediaPlayerImpl::generateKeyRequest(const WebString& key_system,
                                       const unsigned char* init_data,
                                       unsigned init_data_length) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return encrypted_media_support_->GenerateKeyRequest(
      frame_, key_system, init_data, init_data_length);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::addKey(
    const WebString& key_system,
    const unsigned char* key,
    unsigned key_length,
    const unsigned char* init_data,
    unsigned init_data_length,
    const WebString& session_id) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return encrypted_media_support_->AddKey(
      key_system, key, key_length, init_data, init_data_length, session_id);
}

WebMediaPlayer::MediaKeyException WebMediaPlayerImpl::cancelKeyRequest(
    const WebString& key_system,
    const WebString& session_id) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  return encrypted_media_support_->CancelKeyRequest(key_system, session_id);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm);
}

void WebMediaPlayerImpl::setContentDecryptionModule(
    blink::WebContentDecryptionModule* cdm,
    blink::WebContentDecryptionModuleResult result) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModule(cdm, result);
}

void WebMediaPlayerImpl::setContentDecryptionModuleSync(
    blink::WebContentDecryptionModule* cdm) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  encrypted_media_support_->SetContentDecryptionModuleSync(cdm);
}
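
// Pipeline callback: a previously requested seek has completed. If another
// seek came in while this one was in flight, immediately re-issue it;
// otherwise record the new paused time and remember whether Blink needs a
// timeChanged() notification once buffering reaches HAVE_ENOUGH.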
void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
                                          PipelineStatus status) {
  DVLOG(1) << __FUNCTION__ << "(" << time_changed << ", " << status << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());
  seeking_ = false;
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_seconds_);
    return;
  }

  if (status != media::PIPELINE_OK) {
    OnPipelineError(status);
    return;
  }

  // Update our paused time.
  if (paused_)
    paused_time_ = pipeline_.GetMediaTime();

  should_notify_time_changed_ = time_changed;
}

void WebMediaPlayerImpl::OnPipelineEnded() {
  DVLOG(1) << __FUNCTION__;
  DCHECK(main_loop_->BelongsToCurrentThread());
  client_->timeChanged();
}
void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
  DCHECK(main_loop_->BelongsToCurrentThread());
  DCHECK_NE(error, media::PIPELINE_OK);

  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
    // Any error that occurs before reaching ReadyStateHaveMetadata should
    // be considered a format error.
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  SetNetworkState(PipelineErrorToNetworkState(error));

  if (error == media::PIPELINE_ERROR_DECRYPT)
    encrypted_media_support_->OnPipelineDecryptError();
}
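
// Pipeline callback delivering stream metadata. This is where the ready state
// first advances to HaveMetadata and, for video, where the compositor-backed
// cc::VideoLayer is created and handed to Blink. The reported natural size is
// transposed for 90/270-degree rotations.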
void WebMediaPlayerImpl::OnPipelineMetadata(
    media::PipelineMetadata metadata) {
  DVLOG(1) << __FUNCTION__;

  pipeline_metadata_ = metadata;

  UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
                            metadata.video_rotation,
                            media::VIDEO_ROTATION_MAX + 1);
  SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);

  if (hasVideo()) {
    DCHECK(!video_weblayer_);
    scoped_refptr<cc::VideoLayer> layer =
        cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);

    if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 ||
        pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) {
      gfx::Size size = pipeline_metadata_.natural_size;
      pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
    }

    video_weblayer_.reset(new cc_blink::WebLayerImpl(layer));
    video_weblayer_->setOpaque(opaque_);
    client_->setWebLayer(video_weblayer_.get());
  }
}
void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
    media::BufferingState buffering_state) {
  DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";

  // Ignore buffering state changes until we've completed all outstanding
  // seeks.
  if (seeking_ || pending_seek_)
    return;

  // TODO(scherkus): Handle other buffering states when Pipeline starts using
  // them and translate them to ready state changes. http://crbug.com/144683
  DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH);
  SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  // Blink expects a timeChanged() in response to a seek().
  if (should_notify_time_changed_)
    client_->timeChanged();
}
void WebMediaPlayerImpl::OnDemuxerOpened() {
  DCHECK(main_loop_->BelongsToCurrentThread());
  client_->mediaSourceOpened(new WebMediaSourceImpl(
      chunk_demuxer_, base::Bind(&LogMediaSourceError, media_log_)));
}

void WebMediaPlayerImpl::OnAddTextTrack(
    const media::TextTrackConfig& config,
    const media::AddTextTrackDoneCB& done_cb) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  const WebInbandTextTrackImpl::Kind web_kind =
      static_cast<WebInbandTextTrackImpl::Kind>(config.kind());
  const blink::WebString web_label =
      blink::WebString::fromUTF8(config.label());
  const blink::WebString web_language =
      blink::WebString::fromUTF8(config.language());
  const blink::WebString web_id =
      blink::WebString::fromUTF8(config.id());

  scoped_ptr<WebInbandTextTrackImpl> web_inband_text_track(
      new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id,
                                 text_track_index_++));

  scoped_ptr<media::TextTrack> text_track(
      new TextTrackImpl(main_loop_, client_, web_inband_text_track.Pass()));

  done_cb.Run(text_track.Pass());
}
void WebMediaPlayerImpl::DataSourceInitialized(bool success) {
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (!success) {
    SetNetworkState(WebMediaPlayer::NetworkStateFormatError);
    return;
  }

  StartPipeline();
}
void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
  if (!is_downloading && network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateIdle);
  else if (is_downloading && network_state_ == WebMediaPlayer::NetworkStateIdle)
    SetNetworkState(WebMediaPlayer::NetworkStateLoading);
  media_log_->AddEvent(
      media_log_->CreateBooleanEvent(
          media::MediaLogEvent::NETWORK_ACTIVITY_SET,
          "is_downloading_data", is_downloading));
}
// TODO(xhwang): Move this to a factory class so that we can create different
// renderers.
scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
  media::SetDecryptorReadyCB set_decryptor_ready_cb =
      encrypted_media_support_->CreateSetDecryptorReadyCB();

  // Create our audio decoders and renderer.
  ScopedVector<media::AudioDecoder> audio_decoders;

  media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_);
  audio_decoders.push_back(new media::FFmpegAudioDecoder(media_loop_, log_cb));
  audio_decoders.push_back(new media::OpusAudioDecoder(media_loop_));

  scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl(
      media_loop_,
      audio_source_provider_.get(),
      audio_decoders.Pass(),
      set_decryptor_ready_cb,
      RenderThreadImpl::current()->GetAudioHardwareConfig()));

  // Create our video decoders and renderer.
  ScopedVector<media::VideoDecoder> video_decoders;

  if (gpu_factories_.get()) {
    video_decoders.push_back(
        new media::GpuVideoDecoder(gpu_factories_, media_log_));
  }

#if !defined(MEDIA_DISABLE_LIBVPX)
  video_decoders.push_back(new media::VpxVideoDecoder(media_loop_));
#endif  // !defined(MEDIA_DISABLE_LIBVPX)

  video_decoders.push_back(new media::FFmpegVideoDecoder(media_loop_));

  scoped_ptr<media::VideoRenderer> video_renderer(
      new media::VideoRendererImpl(
          media_loop_,
          video_decoders.Pass(),
          set_decryptor_ready_cb,
          base::Bind(&WebMediaPlayerImpl::FrameReady, base::Unretained(this)),
          true));

  // Create renderer.
  return scoped_ptr<media::Renderer>(new media::RendererImpl(
      media_loop_,
      demuxer_.get(),
      audio_renderer.Pass(),
      video_renderer.Pass()));
}
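
// Kicks off pipeline initialization: picks the demuxer (FFmpegDemuxer for URL
// playback, ChunkDemuxer for MSE), assembles the filter collection, and
// starts |pipeline_| with callbacks bound back to the render thread.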
void WebMediaPlayerImpl::StartPipeline() {
  DCHECK(main_loop_->BelongsToCurrentThread());
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();

  // Keep track of whether this is an MSE or non-MSE playback.
  UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
                        (load_type_ == LoadTypeMediaSource));

  media::LogCB mse_log_cb;
  media::Demuxer::NeedKeyCB need_key_cb =
      encrypted_media_support_->CreateNeedKeyCB();

  // Figure out which demuxer to use.
  if (load_type_ != LoadTypeMediaSource) {
    DCHECK(!chunk_demuxer_);
    DCHECK(data_source_);

    demuxer_.reset(new media::FFmpegDemuxer(
        media_loop_, data_source_.get(),
        need_key_cb,
        media_log_));
  } else {
    DCHECK(!chunk_demuxer_);
    DCHECK(!data_source_);

    mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);

    chunk_demuxer_ = new media::ChunkDemuxer(
        BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
        need_key_cb,
        mse_log_cb,
        true);
    demuxer_.reset(chunk_demuxer_);
  }

  scoped_ptr<media::FilterCollection> filter_collection(
      new media::FilterCollection());
  filter_collection->SetDemuxer(demuxer_.get());
  filter_collection->SetRenderer(CreateRenderer());

  if (cmd_line->HasSwitch(switches::kEnableInbandTextTracks)) {
    scoped_ptr<media::TextRenderer> text_renderer(
        new media::TextRenderer(
            media_loop_,
            BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnAddTextTrack)));
    filter_collection->SetTextRenderer(text_renderer.Pass());
  }

  // ... and we're ready to go!
  seeking_ = true;
  pipeline_.Start(
      filter_collection.Pass(),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineEnded),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineError),
      BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, false),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineMetadata),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnPipelineBufferingStateChanged),
      BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDurationChanged));
}
void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());
  network_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->networkStateChanged();
}

void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
  DVLOG(1) << __FUNCTION__ << "(" << state << ")";
  DCHECK(main_loop_->BelongsToCurrentThread());

  if (state == WebMediaPlayer::ReadyStateHaveEnoughData && data_source_ &&
      data_source_->assume_fully_buffered() &&
      network_state_ == WebMediaPlayer::NetworkStateLoading)
    SetNetworkState(WebMediaPlayer::NetworkStateLoaded);

  ready_state_ = state;
  // Always notify to ensure client has the latest value.
  client_->readyStateChanged();
}
blink::WebAudioSourceProvider* WebMediaPlayerImpl::audioSourceProvider() {
  return audio_source_provider_.get();
}

void WebMediaPlayerImpl::IncrementExternallyAllocatedMemory() {
  DCHECK(main_loop_->BelongsToCurrentThread());
  incremented_externally_allocated_memory_ = true;
  v8::Isolate::GetCurrent()->AdjustAmountOfExternalAllocatedMemory(
      kPlayerExtraMemory);
}

double WebMediaPlayerImpl::GetPipelineDuration() const {
  base::TimeDelta duration = pipeline_.GetMediaDuration();

  // Return positive infinity if the resource is unbounded.
  // http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration
  if (duration == media::kInfiniteDuration())
    return std::numeric_limits<double>::infinity();

  return duration.InSecondsF();
}

void WebMediaPlayerImpl::OnDurationChanged() {
  if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
    return;

  client_->durationChanged();
}
void WebMediaPlayerImpl::OnNaturalSizeChanged(gfx::Size size) {
  DCHECK(main_loop_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);
  TRACE_EVENT0("media", "WebMediaPlayerImpl::OnNaturalSizeChanged");

  media_log_->AddEvent(
      media_log_->CreateVideoSizeSetEvent(size.width(), size.height()));
  pipeline_metadata_.natural_size = size;

  client_->sizeChanged();
}

void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) {
  DCHECK(main_loop_->BelongsToCurrentThread());
  DCHECK_NE(ready_state_, WebMediaPlayer::ReadyStateHaveNothing);

  opaque_ = opaque;
  if (video_weblayer_)
    video_weblayer_->setOpaque(opaque_);
}
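
// Called by the video renderer (typically on the media thread) whenever a new
// frame is ready; the frame is posted to |compositor_task_runner_| so that
// VideoFrameCompositor::UpdateCurrentFrame always runs on the compositor
// thread.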
void WebMediaPlayerImpl::FrameReady(
    const scoped_refptr<media::VideoFrame>& frame) {
  compositor_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoFrameCompositor::UpdateCurrentFrame,
                 base::Unretained(compositor_),
                 frame));
}

static void GetCurrentFrameAndSignal(
    VideoFrameCompositor* compositor,
    scoped_refptr<media::VideoFrame>* video_frame_out,
    base::WaitableEvent* event) {
  TRACE_EVENT0("media", "GetCurrentFrameAndSignal");
  *video_frame_out = compositor->GetCurrentFrame();
  event->Signal();
}

scoped_refptr<media::VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
  TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
  if (compositor_task_runner_->BelongsToCurrentThread())
    return compositor_->GetCurrentFrame();

  // Use a posted task and waitable event instead of a lock otherwise
  // WebGL/Canvas can see different content than what the compositor is seeing.
  scoped_refptr<media::VideoFrame> video_frame;
  base::WaitableEvent event(false, false);
  compositor_task_runner_->PostTask(FROM_HERE,
                                    base::Bind(&GetCurrentFrameAndSignal,
                                               base::Unretained(compositor_),
                                               &video_frame,
                                               &event));
  event.Wait();
  return video_frame;
}

}  // namespace content