// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_renderer_factory_impl.h"

#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_video_renderer_sink.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
#include "content/renderer/media/webrtc_audio_renderer.h"
#include "content/renderer/media/webrtc_local_audio_renderer.h"
#include "content/renderer/render_thread_impl.h"
#include "media/base/audio_hardware_config.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"

namespace content {

namespace {

PeerConnectionDependencyFactory* GetPeerConnectionDependencyFactory() {
  return RenderThreadImpl::current()->GetPeerConnectionDependencyFactory();
}

void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
}
// Returns true if a single capture device is currently open, in which case
// |session_id| is set to that device's session id; otherwise |session_id| is
// -1.
// This is used to pass a session id on to a webrtc audio renderer (either
// local or remote), so that audio will be rendered to a matching output
// device, should one exist.
// Note that if more than one capture device is open, the function cannot pick
// an appropriate device and returns false.
bool GetAuthorizedDeviceInfoForAudioRenderer(
    int* session_id,
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();
  if (!audio_device)
    return false;

  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
      session_id, output_sample_rate, output_frames_per_buffer);
}
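
// Creates a WebRtcAudioRenderer for the remote |stream|, or returns NULL if
// the stream has no audio tracks.  Output parameters (sample rate, buffer
// size) are taken from the authorized capture device when exactly one is
// open, and otherwise fall back to the default hardware output configuration.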
scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamRendererFactoryImpl::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId.  Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(
      GetPeerConnectionDependencyFactory()->GetWebRtcSignalingThread(), stream,
      render_frame_id, session_id, sample_rate, buffer_size);
}
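
// Creates a WebRtcLocalAudioRenderer that renders the given locally captured
// |audio_track|.  Output parameters are chosen with the same fallback logic
// as in CreateRemoteAudioRenderer() above.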
scoped_refptr<WebRtcLocalAudioRenderer> CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamRendererFactoryImpl::CreateLocalAudioRenderer";

  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as source.
  return new WebRtcLocalAudioRenderer(
      audio_track,
      render_frame_id,
      session_id,
      buffer_size);
}

}  // namespace

MediaStreamRendererFactoryImpl::MediaStreamRendererFactoryImpl() {
}

MediaStreamRendererFactoryImpl::~MediaStreamRendererFactoryImpl() {
}
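
// Looks up the blink::WebMediaStream registered for |url| and returns a
// MediaStreamVideoRendererSink wrapping its first video track, or NULL if the
// stream has no video track backed by a MediaStreamVideoTrack.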
scoped_refptr<VideoFrameProvider>
MediaStreamRendererFactoryImpl::GetVideoFrameProvider(
    const GURL& url,
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
  DCHECK(!web_stream.isNull());

  DVLOG(1) << "MediaStreamRendererFactoryImpl::GetVideoFrameProvider stream:"
           << base::UTF16ToUTF8(base::StringPiece16(web_stream.id()));

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty() ||
      !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
    return NULL;
  }

  return new MediaStreamVideoRendererSink(video_tracks[0], error_cb,
                                          repaint_cb);
}
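
// Returns an audio renderer for the stream registered for |url|.  Local
// streams get a WebRtcLocalAudioRenderer for their first audio track; for
// remote streams a single WebRtcAudioRenderer is shared per audio device and
// handed out through per-stream proxies.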
scoped_refptr<MediaStreamAudioRenderer>
MediaStreamRendererFactoryImpl::GetAudioRenderer(const GURL& url,
                                                 int render_frame_id) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamRendererFactoryImpl::GetAudioRenderer stream:"
           << base::UTF16ToUTF8(base::StringPiece16(web_stream.id()));

  MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);

  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
  // Tracks _might_, but even so, we need to fix the data flow so that
  // it works the same way for all track implementations, local, remote or what
  // have you.
  // In this function, we should simply create a renderer object that receives
  // and mixes audio from all the tracks that belong to the media stream.
  // We need to remove the |is_local| property from MediaStreamExtraData since
  // this concept is peerconnection specific (is a previously recorded stream
  // local or remote?).
  if (native_stream->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
      return NULL;

    // TODO(xians): Add support for the case where the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
  }

  webrtc::MediaStreamInterface* stream =
      MediaStream::GetAdapter(web_stream);
  if (stream->GetAudioTracks().empty())
    return NULL;

  // This is a remote WebRTC media stream.
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();

  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(stream, render_frame_id);

    if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
      renderer = NULL;
  }
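
  // Each stream gets its own proxy object so that playback state can be
  // tracked per stream while the underlying WebRtcAudioRenderer (and thus the
  // physical audio sink) is shared.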
  return renderer.get() ?
      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
}

}  // namespace content
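
// Illustrative usage sketch (assumption: WebMediaPlayerMS, the expected
// caller, holds a MediaStreamRendererFactory |factory| and the blob URL of
// the MediaStream it plays):
//
//   scoped_refptr<VideoFrameProvider> provider =
//       factory->GetVideoFrameProvider(url, error_cb, repaint_cb);
//   scoped_refptr<MediaStreamAudioRenderer> audio_renderer =
//       factory->GetAudioRenderer(url, render_frame_id);
//   if (audio_renderer.get())
//     audio_renderer->Start();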