// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

#include <vector>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "content/renderer/render_thread_impl.h"
#include "jingle/glue/thread_wrapper.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"

#if defined(USE_OPENSSL)
#include "third_party/webrtc/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

#if defined(OS_ANDROID)
#include "media/base/android/media_codec_bridge.h"
#endif

namespace content {

// Map of corresponding media constraints and platform effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};

// If any platform effects are available, check them against the constraints.
// Disable effects to match false constraints, but if a constraint is true, set
// the constraint to false to later disable the software effect.
//
// This function may modify both |constraints| and |effects|.
void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
                                    int* effects) {
  if (*effects != media::AudioParameters::NO_EFFECTS) {
    for (size_t i = 0; i < arraysize(kConstraintEffectMap); ++i) {
      bool value = false;
      size_t is_mandatory = 0;
      if (!webrtc::FindConstraint(constraints,
                                  kConstraintEffectMap[i].constraint,
                                  &value,
                                  &is_mandatory) || !value) {
        // If the constraint is false, or does not exist, disable the platform
        // effect.
        *effects &= ~kConstraintEffectMap[i].effect;
        DVLOG(1) << "Disabling platform effect: "
                 << kConstraintEffectMap[i].effect;
      } else if (*effects & kConstraintEffectMap[i].effect) {
        // If the constraint is true, leave the platform effect enabled, and
        // set the constraint to false to later disable the software effect.
        if (is_mandatory) {
          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
                                    webrtc::MediaConstraintsInterface::kValueFalse,
                                    true);
        } else {
          constraints->AddOptional(kConstraintEffectMap[i].constraint,
                                   webrtc::MediaConstraintsInterface::kValueFalse,
                                   true);
        }
        DVLOG(1) << "Disabling constraint: "
                 << kConstraintEffectMap[i].constraint;
      } else if (kConstraintEffectMap[i].effect ==
                     media::AudioParameters::DUCKING && value && !is_mandatory) {
        // Special handling of the DUCKING flag that sets the optional
        // constraint to |false| to match what the device will support.
        constraints->AddOptional(kConstraintEffectMap[i].constraint,
                                 webrtc::MediaConstraintsInterface::kValueFalse,
                                 true);
        // No need to modify |effects| since the ducking flag is already off.
        DCHECK((*effects & media::AudioParameters::DUCKING) == 0);
      }
    }
  }
}

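// Factory that builds P2PPortAllocator instances on top of Chrome's
// IPC-backed socket dispatcher, network manager, and packet socket factory,
// translating the STUN/TURN configuration handed in by libjingle.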
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      rtc::NetworkManager* network_manager,
      rtc::PacketSocketFactory* socket_factory)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory) {
  }

  cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) override {
    P2PPortAllocator::Config config;
    for (size_t i = 0; i < stun_servers.size(); ++i) {
      config.stun_servers.insert(rtc::SocketAddress(
          stun_servers[i].server.hostname(),
          stun_servers[i].server.port()));
    }
    for (size_t i = 0; i < turn_configurations.size(); ++i) {
      P2PPortAllocator::Config::RelayServerConfig relay_config;
      relay_config.server_address = turn_configurations[i].server.hostname();
      relay_config.port = turn_configurations[i].server.port();
      relay_config.username = turn_configurations[i].username;
      relay_config.password = turn_configurations[i].password;
      relay_config.transport_type = turn_configurations[i].transport_type;
      relay_config.secure = turn_configurations[i].secure;
      config.relays.push_back(relay_config);

      // Use TURN servers as STUN servers as well.
      config.stun_servers.insert(rtc::SocketAddress(
          turn_configurations[i].server.hostname(),
          turn_configurations[i].server.port()));
    }

    return new P2PPortAllocator(
        socket_dispatcher_.get(), network_manager_, socket_factory_, config);
  }

 protected:
  ~P2PPortAllocatorFactory() override {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // PeerConnectionDependencyFactory.
  rtc::NetworkManager* network_manager_;
  rtc::PacketSocketFactory* socket_factory_;
};

PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_signaling_thread_("Chrome_libJingle_Signaling"),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  DVLOG(1) << "~PeerConnectionDependencyFactory()";
  DCHECK(pc_factory_ == NULL);
}

blink::WebRTCPeerConnectionHandler*
PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webkitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}

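// Performs the Chrome-side initialization of a local audio source: creates a
// WebRtcAudioCapturer for the device, reconciles the audio constraints with
// the platform's hardware effects, and attaches a libjingle LocalAudioSource
// to |source_data|. Returns false if any step fails.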
bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSources()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    const std::string log_string =
        "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
    WebRtcLogMessage(log_string);
    DVLOG(1) << log_string;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer.get());

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source.get());
  return true;
}

WebRtcVideoCapturerAdapter*
PeerConnectionDependencyFactory::CreateVideoCapturer(
    bool is_screencast) {
  // We need to make sure the libjingle thread wrappers have been created
  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
  // because the base class of WebRtcVideoCapturerAdapter is a
  // cricket::VideoCapturer, which uses the libjingle thread wrappers.
  if (!GetPcFactory().get())
    return NULL;

  return new WebRtcVideoCapturerAdapter(is_screencast);
}

scoped_refptr<webrtc::VideoSourceInterface>
PeerConnectionDependencyFactory::CreateVideoSource(
    cricket::VideoCapturer* capturer,
    const blink::WebMediaConstraints& constraints) {
  RTCMediaConstraints webrtc_constraints(constraints);
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
  return source;
}

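// Returns the libjingle PeerConnectionFactory, lazily creating it (together
// with the signaling and worker threads it depends on) on first use.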
const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
PeerConnectionDependencyFactory::GetPcFactory() {
  if (!pc_factory_.get())
    CreatePeerConnectionFactory();
  CHECK(pc_factory_.get());
  return pc_factory_;
}

void PeerConnectionDependencyFactory::WillDestroyCurrentMessageLoop() {
  CleanupPeerConnectionFactory();
}

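// Creates the PeerConnectionFactory along with the Chrome threads that back
// the libjingle signaling and worker threads. Blocks until both threads have
// finished their initialization tasks.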
void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_signaling_thread_.IsRunning());
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

  base::MessageLoop::current()->AddDestructionObserver(this);
  // To allow sending to the signaling/worker threads.
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);

  CHECK(chrome_signaling_thread_.Start());
  CHECK(chrome_worker_thread_.Start());

  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));

  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));

  start_worker_event.Wait();
  create_network_manager_event.Wait();

  CHECK(worker_thread_);

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!rtc::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  base::WaitableEvent start_signaling_event(true, false);
  chrome_signaling_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeSignalingThread,
      base::Unretained(this),
      RenderThreadImpl::current()->GetGpuFactories(),
      &start_signaling_event));

  start_signaling_event.Wait();
  CHECK(signaling_thread_);
}

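// Runs on the signaling thread: wraps it as a libjingle thread, creates the
// hardware video encoder/decoder factories when GPU support is available, and
// builds the PeerConnectionFactory. Signals |event| when done.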
void PeerConnectionDependencyFactory::InitializeSignalingThread(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories,
    base::WaitableEvent* event) {
  DCHECK(chrome_signaling_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK(worker_thread_);
  DCHECK(p2p_socket_dispatcher_.get());

  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();

  EnsureWebRtcAudioDeviceImpl();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
  if (gpu_factories.get()) {
    if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding))
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));

    if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  pc_factory_ = webrtc::CreatePeerConnectionFactory(
      worker_thread_, signaling_thread_, audio_device_.get(),
      encoder_factory.release(), decoder_factory.release());
  CHECK(pc_factory_.get());

  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  event->Signal();
}

bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

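// Creates a libjingle PeerConnection wired to a Chrome P2P port allocator
// factory and a DTLS identity service scoped to |web_frame|'s security
// origin.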
scoped_refptr<webrtc::PeerConnectionInterface>
PeerConnectionDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    const webrtc::MediaConstraintsInterface* constraints,
    blink::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  if (!GetPcFactory().get())
    return NULL;

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new rtc::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get());

  PeerConnectionIdentityService* identity_service =
      new PeerConnectionIdentityService(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return GetPcFactory()->CreatePeerConnection(config,
                                              constraints,
                                              pa_factory.get(),
                                              identity_service,
                                              observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
PeerConnectionDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
PeerConnectionDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      GetPcFactory()->CreateAudioSource(constraints).get();
  return source;
}

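// Builds the native WebRtcLocalAudioTrack for |track| and hands its ownership
// to the blink track. For WebAudio-backed sources a dedicated
// WebAudioCapturerSource is created first.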
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}

void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}

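// Creates a WebAudioCapturerSource plus a backing MediaStreamAudioSource for a
// WebAudio-driven stream, and registers the capturer as |source|'s audio
// consumer so that WebAudio data is fed into the WebRTC pipeline.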
scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id, cricket::VideoCapturer* capturer) {
  if (!capturer) {
    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    return NULL;
  }

  // Create video source from the |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, NULL).get();

  // Create native track from the source.
  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
}

webrtc::SessionDescriptionInterface*
PeerConnectionDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface*
PeerConnectionDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}

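// Runs on the Chrome worker thread: wraps it as a libjingle rtc::Thread,
// publishes it through |*thread|, and signals |event| when ready.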
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    rtc::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}

void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}

void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  DVLOG(1) << "PeerConnectionDependencyFactory::CleanupPeerConnectionFactory()";
  if (network_manager_) {
    // The network manager needs to free its resources on the thread where they
    // were created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been processed
      // before returning. We wait for the above task to finish before letting
      // the function continue to avoid any potential race issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

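// Creates a WebRtcAudioCapturer for the given capture device, bound to the
// shared WebRtcAudioDeviceImpl and to |audio_source|.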
scoped_refptr<WebRtcAudioCapturer>
PeerConnectionDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_signaling_thread_.message_loop_proxy();
}

void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_.get())
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}

}  // namespace content