// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

#include <vector>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "content/renderer/render_thread_impl.h"
#include "jingle/glue/thread_wrapper.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"

#if defined(USE_OPENSSL)
#include "third_party/webrtc/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

#if defined(OS_ANDROID)
#include "media/base/android/media_codec_bridge.h"
#endif

namespace content {

// Map of corresponding media constraints and platform effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};

// If any platform effects are available, check them against the constraints.
// Disable effects to match false constraints, but if a constraint is true, set
// the constraint to false to later disable the software effect.
//
// This function may modify both |constraints| and |effects|.
void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
                                    int* effects) {
  if (*effects != media::AudioParameters::NO_EFFECTS) {
    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
      bool value;
      size_t is_mandatory = 0;
      if (!webrtc::FindConstraint(constraints,
                                  kConstraintEffectMap[i].constraint,
                                  &value,
                                  &is_mandatory) || !value) {
        // If the constraint is false, or does not exist, disable the platform
        // effect.
        *effects &= ~kConstraintEffectMap[i].effect;
        DVLOG(1) << "Disabling platform effect: "
                 << kConstraintEffectMap[i].effect;
      } else if (*effects & kConstraintEffectMap[i].effect) {
        // If the constraint is true, leave the platform effect enabled, and
        // set the constraint to false to later disable the software effect.
        if (is_mandatory) {
          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        } else {
          constraints->AddOptional(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        }
        DVLOG(1) << "Disabling constraint: "
                 << kConstraintEffectMap[i].constraint;
      } else if (kConstraintEffectMap[i].effect ==
                     media::AudioParameters::DUCKING && value && !is_mandatory) {
        // Special handling of the DUCKING flag that sets the optional
        // constraint to |false| to match what the device will support.
        constraints->AddOptional(kConstraintEffectMap[i].constraint,
            webrtc::MediaConstraintsInterface::kValueFalse, true);
        // No need to modify |effects| since the ducking flag is already off.
        DCHECK((*effects & media::AudioParameters::DUCKING) == 0);
      }
    }
  }
}
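
// Adapts Chrome's IPC-backed networking objects (P2PSocketDispatcher,
// IpcNetworkManager, IpcPacketSocketFactory) to libjingle's
// PortAllocatorFactoryInterface. CreatePortAllocator() translates the
// STUN/TURN configurations supplied by WebRTC into a P2PPortAllocator::Config
// and returns a P2PPortAllocator bound to the WebFrame that created it.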
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      rtc::NetworkManager* network_manager,
      rtc::PacketSocketFactory* socket_factory,
      blink::WebFrame* web_frame)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory),
        web_frame_(web_frame) {
  }

  virtual cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
    CHECK(web_frame_);
    P2PPortAllocator::Config config;
    for (size_t i = 0; i < stun_servers.size(); ++i) {
      config.stun_servers.insert(rtc::SocketAddress(
          stun_servers[i].server.hostname(),
          stun_servers[i].server.port()));
    }
    config.legacy_relay = false;
    for (size_t i = 0; i < turn_configurations.size(); ++i) {
      P2PPortAllocator::Config::RelayServerConfig relay_config;
      relay_config.server_address = turn_configurations[i].server.hostname();
      relay_config.port = turn_configurations[i].server.port();
      relay_config.username = turn_configurations[i].username;
      relay_config.password = turn_configurations[i].password;
      relay_config.transport_type = turn_configurations[i].transport_type;
      relay_config.secure = turn_configurations[i].secure;
      config.relays.push_back(relay_config);

      // Use turn servers as stun servers.
      config.stun_servers.insert(rtc::SocketAddress(
          turn_configurations[i].server.hostname(),
          turn_configurations[i].server.port()));
    }

    return new P2PPortAllocator(
        web_frame_, socket_dispatcher_.get(), network_manager_,
        socket_factory_, config);
  }

 protected:
  virtual ~P2PPortAllocatorFactory() {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // PeerConnectionDependencyFactory.
  rtc::NetworkManager* network_manager_;
  rtc::PacketSocketFactory* socket_factory_;
  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
  blink::WebFrame* web_frame_;
};

PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  CleanupPeerConnectionFactory();
  if (aec_dump_message_filter_.get())
    aec_dump_message_filter_->RemoveDelegate(this);
}

blink::WebRTCPeerConnectionHandler*
PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webKitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}
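
// Completes the setup of a microphone or tab-capture audio source: harmonizes
// the audio constraints with the device's platform effects, creates the
// WebRtcAudioCapturer and the libjingle LocalAudioSource, and attaches both to
// |source_data|. Returns false if either object cannot be created.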
bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSources()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    DLOG(WARNING) << "Failed to create the capturer for device "
                  << device_info.device.id;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer.get());

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source.get());
  return true;
}

WebRtcVideoCapturerAdapter*
PeerConnectionDependencyFactory::CreateVideoCapturer(
    bool is_screencast) {
  // We need to make sure the libjingle thread wrappers have been created
  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
  // since the base class of WebRtcVideoCapturerAdapter is a
  // cricket::VideoCapturer and it uses the libjingle thread wrappers.
  if (!GetPcFactory().get())
    return NULL;
  return new WebRtcVideoCapturerAdapter(is_screencast);
}

scoped_refptr<webrtc::VideoSourceInterface>
PeerConnectionDependencyFactory::CreateVideoSource(
    cricket::VideoCapturer* capturer,
    const blink::WebMediaConstraints& constraints) {
  RTCMediaConstraints webrtc_constraints(constraints);
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
  return source;
}

const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
PeerConnectionDependencyFactory::GetPcFactory() {
  if (!pc_factory_.get())
    CreatePeerConnectionFactory();
  CHECK(pc_factory_.get());
  return pc_factory_;
}
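
// Builds the libjingle PeerConnectionFactory together with everything it
// depends on: the signaling thread wrapper for the current message loop, the
// Chrome_libJingle_WorkerThread, the IPC-based network manager and socket
// factory, SSL initialization, and (unless disabled by command-line switches
// or missing GPU support) the hardware video encoder/decoder factories.
// Invoked lazily from GetPcFactory().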
void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
  CHECK(signaling_thread_);

  CHECK(chrome_worker_thread_.Start());

  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));
  start_worker_event.Wait();
  CHECK(worker_thread_);

  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));
  create_network_manager_event.Wait();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!rtc::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories.get())
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories.get())
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  EnsureWebRtcAudioDeviceImpl();

  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device_.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  CHECK(factory.get());

  pc_factory_ = factory;
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  // TODO(xians): Remove the following code after kDisableAudioTrackProcessing
  // is removed.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) {
    aec_dump_message_filter_ = AecDumpMessageFilter::Get();
    // In unit tests that do not create a message filter,
    // |aec_dump_message_filter_| will be NULL. We can just ignore that. Other
    // unit tests and browser tests ensure that we do get the filter when we
    // should.
    if (aec_dump_message_filter_.get())
      aec_dump_message_filter_->AddDelegate(this);
  }
}

bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

scoped_refptr<webrtc::PeerConnectionInterface>
PeerConnectionDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    const webrtc::MediaConstraintsInterface* constraints,
    blink::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);
  if (!GetPcFactory().get())
    return NULL;

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new rtc::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get(),
          web_frame);

  PeerConnectionIdentityService* identity_service =
      new PeerConnectionIdentityService(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return GetPcFactory()->CreatePeerConnection(config,
                                              constraints,
                                              pa_factory.get(),
                                              identity_service,
                                              observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
PeerConnectionDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
PeerConnectionDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      GetPcFactory()->CreateAudioSource(constraints).get();
  return source;
}
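
// Creates the native WebRtcLocalAudioTrack for a blink audio track. For
// WebAudio sources a dedicated WebAudioCapturerSource is created first; the
// resulting track adapter and capturer are then wrapped in a
// WebRtcLocalAudioTrack whose ownership is handed to the blink track via
// setExtraData().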
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}

void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Remove the following line of code after the APM in WebRTC is
  // completely deprecated. See http://crbug/365672.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled())
    audio_track->AddSink(GetWebRtcAudioDevice());

  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}

scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id, cricket::VideoCapturer* capturer) {
  if (!capturer) {
    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    return NULL;
  }

  // Create video source from the |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, NULL).get();

  // Create native track from the source.
  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
}

webrtc::SessionDescriptionInterface*
PeerConnectionDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface*
PeerConnectionDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}
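
// Runs on |chrome_worker_thread_|: wraps the worker thread's message loop in a
// JingleThreadWrapper so libjingle can use it, publishes the resulting
// rtc::Thread* through |thread|, and signals |event| so the caller blocked in
// CreatePeerConnectionFactory() can continue.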
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    rtc::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}

void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}
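
// Releases the PeerConnection factory and, if a network manager was created,
// deletes it on the worker thread it was created on before stopping that
// thread.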
void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been processed
      // before returning. We wait for the above task to finish before letting
      // the function continue to avoid any potential race issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

scoped_refptr<WebRtcAudioCapturer>
PeerConnectionDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}
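
// Associates |native_track| with |webkit_track| by storing a MediaStreamTrack
// wrapper as the blink track's extra data.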
void PeerConnectionDependencyFactory::AddNativeAudioTrackToBlinkTrack(
    webrtc::MediaStreamTrackInterface* native_track,
    const blink::WebMediaStreamTrack& webkit_track,
    bool is_local_track) {
  DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
  DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio,
            webkit_track.source().type());
  blink::WebMediaStreamTrack track = webkit_track;

  DVLOG(1) << "AddNativeTrackToBlinkTrack() audio";
  track.setExtraData(
      new MediaStreamTrack(
          static_cast<webrtc::AudioTrackInterface*>(native_track),
          is_local_track));
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}
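
// The three callbacks below are invoked via |aec_dump_message_filter_|, which
// this factory registers itself with as a delegate in
// CreatePeerConnectionFactory() when audio track processing is disabled.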
void PeerConnectionDependencyFactory::OnAecDumpFile(
    const IPC::PlatformFileForTransit& file_handle) {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  DCHECK(PeerConnectionFactoryCreated());

  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
  // fails, |aec_dump_file| will be closed.
  if (!GetPcFactory()->StartAecDump(file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}

void PeerConnectionDependencyFactory::OnDisableAecDump() {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  // Do nothing. We never disable AEC dump for the non-track-processing case.
}

void PeerConnectionDependencyFactory::OnIpcClosing() {
  DCHECK(CalledOnValidThread());
  aec_dump_message_filter_ = NULL;
}

void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_.get())
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}

}  // namespace content