// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_renderer_factory.h"

#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/rtc_video_renderer.h"
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
#include "content/renderer/media/webrtc_audio_renderer.h"
#include "content/renderer/media/webrtc_local_audio_renderer.h"
#include "content/renderer/render_thread_impl.h"
#include "media/base/audio_hardware_config.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"

namespace content {

namespace {

PeerConnectionDependencyFactory* GetPeerConnectionDependencyFactory() {
  return RenderThreadImpl::current()->GetPeerConnectionDependencyFactory();
}

void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
}

// Returns true and a valid session id via |session_id| if a single capture
// device is currently open; otherwise returns false.
// This is used to pass a session id on to a webrtc audio renderer (either
// local or remote), so that audio will be rendered to a matching output
// device, should one exist.
// Note that if more than one capture device is open, the function will not
// be able to pick an appropriate device and will return false.
bool GetAuthorizedDeviceInfoForAudioRenderer(
    int* session_id,
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();
  if (!audio_device)
    return false;

  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
      session_id, output_sample_rate, output_frames_per_buffer);
}

scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamRendererFactory::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
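  // Prefer the output parameters of the device authorized for the currently
  // open capture session; if no single session is open, fall back to the
  // default hardware output parameters.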
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(
      GetPeerConnectionDependencyFactory()->GetWebRtcSignalingThread(), stream,
      render_frame_id, session_id, sample_rate, buffer_size);
}

scoped_refptr<WebRtcLocalAudioRenderer> CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamRendererFactory::CreateLocalAudioRenderer";

  int session_id = 0, sample_rate = 0, buffer_size = 0;
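  // Same device selection as in CreateRemoteAudioRenderer: use the authorized
  // capture session's output parameters when available, otherwise the
  // hardware defaults.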
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as source.
  return new WebRtcLocalAudioRenderer(audio_track,
                                      render_frame_id,
                                      session_id,
                                      buffer_size);
}

}  // namespace

MediaStreamRendererFactory::MediaStreamRendererFactory() {
}

MediaStreamRendererFactory::~MediaStreamRendererFactory() {
}

scoped_refptr<VideoFrameProvider>
MediaStreamRendererFactory::GetVideoFrameProvider(
    const GURL& url,
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
  DCHECK(!web_stream.isNull());

  DVLOG(1) << "MediaStreamRendererFactory::GetVideoFrameProvider stream:"
           << base::UTF16ToUTF8(web_stream.id());

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty() ||
      !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
    return NULL;
  }

  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
}

scoped_refptr<MediaStreamAudioRenderer>
MediaStreamRendererFactory::GetAudioRenderer(const GURL& url,
                                             int render_frame_id) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamRendererFactory::GetAudioRenderer stream:"
           << base::UTF16ToUTF8(web_stream.id());

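  // MediaStream::GetMediaStream() returns the content-layer object stored as
  // the WebMediaStream's extra data (verified to be non-null above).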
  MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);

  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
  // Tracks _might_, but even so, we need to fix the data flow so that
  // it works the same way for all track implementations, local, remote or
  // what have you.
  // In this function, we should simply create a renderer object that receives
  // and mixes audio from all the tracks that belong to the media stream.
  // We need to remove the |is_local| property from MediaStreamExtraData since
  // this concept is peerconnection specific (is a previously recorded stream
  // local or remote?).
  if (native_stream->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
      return NULL;

    // TODO(xians): Add support for the case where the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
  }

  webrtc::MediaStreamInterface* stream =
      MediaStream::GetAdapter(web_stream);
  if (stream->GetAudioTracks().empty())
    return NULL;

  // This is a remote WebRTC media stream.
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();

  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(stream, render_frame_id);
  }

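  // Register the renderer with the audio device; if the device does not
  // accept it, drop the renderer so that NULL is returned below.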
  if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
    renderer = NULL;

  return renderer.get() ?
      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
}

}  // namespace content