Fix build break
content/renderer/media/media_stream_dependency_factory.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_dependency_factory.h"

#include <vector>

#include "base/synchronization/waitable_event.h"
#include "base/utf_string_conversions.h"
#include "content/renderer/media/media_stream_source_extra_data.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_capturer.h"
#include "content/renderer/media/video_capture_impl_manager.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "jingle/glue/thread_wrapper.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaConstraints.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStream.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamSource.h"
#include "third_party/WebKit/Source/Platform/chromium/public/WebMediaStreamTrack.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/WebFrame.h"

#if defined(USE_OPENSSL)
#include "third_party/libjingle/source/talk/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

namespace content {

// Constant constraint keys which disable all audio constraints.
// Only used in combination with WebAudio sources.
struct {
  const char* key;
  const char* value;
} const kWebAudioConstraints[] = {
  {webrtc::MediaConstraintsInterface::kEchoCancellation,
   webrtc::MediaConstraintsInterface::kValueFalse},
  {webrtc::MediaConstraintsInterface::kAutoGainControl,
   webrtc::MediaConstraintsInterface::kValueFalse},
  {webrtc::MediaConstraintsInterface::kNoiseSuppression,
   webrtc::MediaConstraintsInterface::kValueFalse},
  {webrtc::MediaConstraintsInterface::kHighpassFilter,
   webrtc::MediaConstraintsInterface::kValueFalse},
};

class WebAudioConstraints : public RTCMediaConstraints {
 public:
  WebAudioConstraints()
      : RTCMediaConstraints(WebKit::WebMediaConstraints()) {
    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kWebAudioConstraints); ++i) {
      webrtc::MediaConstraintsInterface::Constraint constraint;
      constraint.key = kWebAudioConstraints[i].key;
      constraint.value = kWebAudioConstraints[i].value;

      DVLOG(1) << "WebAudioConstraints: " << constraint.key
               << " : " << constraint.value;
      mandatory_.push_back(constraint);
    }
  }

  virtual ~WebAudioConstraints() {}
};

class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      talk_base::NetworkManager* network_manager,
      talk_base::PacketSocketFactory* socket_factory,
      WebKit::WebFrame* web_frame)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory),
        web_frame_(web_frame) {
  }

  virtual cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
    CHECK(web_frame_);
    P2PPortAllocator::Config config;
    if (stun_servers.size() > 0) {
      config.stun_server = stun_servers[0].server.hostname();
      config.stun_server_port = stun_servers[0].server.port();
    }
    if (turn_configurations.size() > 0) {
      config.legacy_relay = false;
      config.relay_server = turn_configurations[0].server.hostname();
      config.relay_server_port = turn_configurations[0].server.port();
      config.relay_username = turn_configurations[0].username;
      config.relay_password = turn_configurations[0].password;
      // Use the turn server as the stun server.
      config.stun_server = config.relay_server;
      config.stun_server_port = config.relay_server_port;
    }

    return new P2PPortAllocator(web_frame_,
                                socket_dispatcher_,
                                network_manager_,
                                socket_factory_,
                                config);
  }

 protected:
  virtual ~P2PPortAllocatorFactory() {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // MediaStreamDependencyFactory.
  talk_base::NetworkManager* network_manager_;
  talk_base::PacketSocketFactory* socket_factory_;
  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
  WebKit::WebFrame* web_frame_;
};

// SourceStateObserver is a helper class used for observing the startup state
// transition of webrtc media sources such as a camera or microphone.
// An instance of the object deletes itself after use.
// Usage:
// 1. Create an instance of the object with the WebKit::WebMediaStream
//    the observed sources belong to and a callback.
// 2. Add the sources to the observer using AddSource.
// 3. Call StartObserving().
// 4. The callback will be triggered when all sources have transitioned from
//    webrtc::MediaSourceInterface::kInitializing.
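//
// Illustrative call sequence (hypothetical caller code; |stream| and
// |done_callback| are assumed to exist in the caller):
//   SourceStateObserver* observer =
//       new SourceStateObserver(stream, done_callback);
//   observer->AddSource(source);  // Repeat for every source in |stream|.
//   observer->StartObserving();   // |done_callback| runs once all sources
//                                 // have left kInitializing.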
class SourceStateObserver : public webrtc::ObserverInterface,
                            public base::NonThreadSafe {
 public:
  SourceStateObserver(
      WebKit::WebMediaStream* description,
      const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
      : description_(description),
        ready_callback_(callback),
        live_(true) {
  }

  void AddSource(webrtc::MediaSourceInterface* source) {
    DCHECK(CalledOnValidThread());
    switch (source->state()) {
      case webrtc::MediaSourceInterface::kInitializing:
        sources_.push_back(source);
        source->RegisterObserver(this);
        break;
      case webrtc::MediaSourceInterface::kLive:
        // The source is already live so we don't need to wait for it.
        break;
      case webrtc::MediaSourceInterface::kEnded:
        // The source has already failed.
        live_ = false;
        break;
      default:
        NOTREACHED();
    }
  }

  void StartObserving() {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

  virtual void OnChanged() OVERRIDE {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

 private:
  void CheckIfSourcesAreLive() {
    ObservedSources::iterator it = sources_.begin();
    while (it != sources_.end()) {
      if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
        live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
        (*it)->UnregisterObserver(this);
        it = sources_.erase(it);
      } else {
        ++it;
      }
    }
    if (sources_.empty()) {
      ready_callback_.Run(description_, live_);
      delete this;
    }
  }

  WebKit::WebMediaStream* description_;
  MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
  bool live_;
  typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
      ObservedSources;
  ObservedSources sources_;
};

MediaStreamDependencyFactory::MediaStreamDependencyFactory(
    VideoCaptureImplManager* vc_manager,
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      vc_manager_(vc_manager),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
  CleanupPeerConnectionFactory();
}

WebKit::WebRTCPeerConnectionHandler*
MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
    WebKit::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webKitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  if (!EnsurePeerConnectionFactory())
    return NULL;

  return new RTCPeerConnectionHandler(client, this);
}

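// Creates the libjingle representation of every local source referenced by
// |description| (camera, tab/screen capture, microphone) and reports the
// result through |sources_created| once all sources have either started or
// failed.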
void MediaStreamDependencyFactory::CreateNativeMediaSources(
    const WebKit::WebMediaConstraints& audio_constraints,
    const WebKit::WebMediaConstraints& video_constraints,
    WebKit::WebMediaStream* description,
    const MediaSourcesCreatedCallback& sources_created) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
  if (!EnsurePeerConnectionFactory()) {
    sources_created.Run(description, false);
    return;
  }

  // |source_observer| deletes itself when all observed sources have left the
  // initializing state, after StartObserving() has been called.
  SourceStateObserver* source_observer =
      new SourceStateObserver(description, sources_created);

  // Create local video sources.
  RTCMediaConstraints native_video_constraints(video_constraints);
  WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
  description->videoSources(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i) {
    const WebKit::WebMediaStreamSource& source = video_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());
    if (!source_data) {
      // TODO(perkj): Implement support for sources from remote MediaStreams.
      NOTIMPLEMENTED();
      continue;
    }
    const bool is_screencast =
        source_data->device_info().device.type ==
            content::MEDIA_TAB_VIDEO_CAPTURE ||
        source_data->device_info().device.type ==
            content::MEDIA_SCREEN_VIDEO_CAPTURE;
    source_data->SetVideoSource(
        CreateLocalVideoSource(source_data->device_info().session_id,
                               is_screencast,
                               &native_video_constraints));
    source_observer->AddSource(source_data->video_source());
  }

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
  description->audioSources(audio_tracks);
  for (size_t i = 0; i < audio_tracks.size(); ++i) {
    const WebKit::WebMediaStreamSource& source = audio_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());
    if (!source_data) {
      // TODO(henrika): Implement support for sources from remote MediaStreams.
      NOTIMPLEMENTED();
      continue;
    }

    const StreamDeviceInfo device_info = source_data->device_info();
    if (IsAudioMediaType(device_info.device.type)) {
      if (!InitializeAudioSource(device_info)) {
        DLOG(WARNING) << "Unsupported audio source";
        sources_created.Run(description, false);
        return;
      }
    }

    // Creates a LocalAudioSource object which holds audio options.
    source_data->SetLocalAudioSource(
        CreateLocalAudioSource(&native_audio_constraints));
    source_observer->AddSource(source_data->local_audio_source());
  }

  source_observer->StartObserving();
}

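// Creates a local webrtc::MediaStream for |description|, attaches it as
// MediaStreamExtraData and adds native tracks for all audio and video tracks
// already present in |description|.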
void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
    WebKit::WebMediaStream* description) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
  if (!EnsurePeerConnectionFactory()) {
    DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
    return;
  }

  std::string label = UTF16ToUTF8(description->label());
  scoped_refptr<webrtc::MediaStreamInterface> native_stream =
      CreateLocalMediaStream(label);
  MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream,
                                                              true);
  description->setExtraData(extra_data);

  // Add audio tracks.
  WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
  description->audioTracks(audio_tracks);
  for (size_t i = 0; i < audio_tracks.size(); ++i) {
    AddNativeMediaStreamTrack(*description, audio_tracks[i]);
  }

  // Add video tracks.
  WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
  description->videoTracks(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i) {
    AddNativeMediaStreamTrack(*description, video_tracks[i]);
  }
}

void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
    WebKit::WebMediaStream* description,
    const MediaStreamExtraData::StreamStopCallback& stream_stop) {
  CreateNativeLocalMediaStream(description);

  MediaStreamExtraData* extra_data =
      static_cast<MediaStreamExtraData*>(description->extraData());
  extra_data->SetLocalStreamStopCallback(stream_stop);
}

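// Adds a native (libjingle) audio or video track to the native stream backing
// |stream|. A WebAudio-backed source gets a WebAudioCapturerSource created on
// demand; sources from remote MediaStreams are not yet supported. Returns
// false if the track could not be added.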
bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack(
    const WebKit::WebMediaStream& stream,
    const WebKit::WebMediaStreamTrack& track) {
  MediaStreamExtraData* extra_data =
      static_cast<MediaStreamExtraData*>(stream.extraData());
  webrtc::MediaStreamInterface* native_stream = extra_data->stream();
  DCHECK(native_stream);

  WebKit::WebMediaStreamSource source = track.source();
  MediaStreamSourceExtraData* source_data =
      static_cast<MediaStreamSourceExtraData*>(source.extraData());

  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // TODO(crogers, xians): In reality we should be able to send a unique
      // audio stream to each PeerConnection separately.
      // Currently WebRTC is only able to handle a global audio stream sent to
      // ALL peers. Create a special source where default WebAudio constraints
      // are used.
      if (CreateWebAudioSource(&source)) {
        source_data =
            static_cast<MediaStreamSourceExtraData*>(source.extraData());
      } else {
        LOG(ERROR) << "Failed to create WebAudio source";
        return false;
      }
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return false;
    }
  }

  WebKit::WebMediaStreamSource::Type type = track.source().type();
  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
         type == WebKit::WebMediaStreamSource::TypeVideo);

  std::string track_id = UTF16ToUTF8(track.id());
  if (source.type() == WebKit::WebMediaStreamSource::TypeAudio) {
    // TODO(henrika,xians): Refactor how an audio track is created to harmonize
    // with video tracks.
    scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        CreateLocalAudioTrack(track_id, source_data->local_audio_source()));
    audio_track->set_enabled(track.isEnabled());
    if (GetWebRtcAudioDevice()) {
      WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
      if (!capturer->is_recording())
        capturer->Start();
    }
    return native_stream->AddTrack(audio_track);
  } else {
    scoped_refptr<webrtc::VideoTrackInterface> video_track(
        CreateLocalVideoTrack(track_id, source_data->video_source()));
    video_track->set_enabled(track.isEnabled());
    return native_stream->AddTrack(video_track);
  }
}

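// Removes the native track with the same id as |track| from the native stream
// backing |stream|.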
bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
    const WebKit::WebMediaStream& stream,
    const WebKit::WebMediaStreamTrack& track) {
  MediaStreamExtraData* extra_data =
      static_cast<MediaStreamExtraData*>(stream.extraData());
  webrtc::MediaStreamInterface* native_stream = extra_data->stream();
  DCHECK(native_stream);

  WebKit::WebMediaStreamSource::Type type = track.source().type();
  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
         type == WebKit::WebMediaStreamSource::TypeVideo);

  std::string track_id = UTF16ToUTF8(track.id());
  return type == WebKit::WebMediaStreamSource::TypeAudio ?
      native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)) :
      native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id));
}

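// Creates |pc_factory_| together with the WebRtcAudioDeviceImpl it depends on.
// The worker and signaling threads must already have been set up by
// EnsurePeerConnectionFactory().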
bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
  DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
  if (!pc_factory_.get()) {
    DCHECK(!audio_device_);
    audio_device_ = new WebRtcAudioDeviceImpl();
    scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
        webrtc::CreatePeerConnectionFactory(worker_thread_,
                                            signaling_thread_,
                                            audio_device_));
    if (factory.get())
      pc_factory_ = factory;
    else
      audio_device_ = NULL;
  }
  return pc_factory_.get() != NULL;
}

bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

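// Creates a libjingle PeerConnection whose ports are allocated through a
// P2PPortAllocatorFactory bound to |web_frame|.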
scoped_refptr<webrtc::PeerConnectionInterface>
MediaStreamDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::IceServers& ice_servers,
    const webrtc::MediaConstraintsInterface* constraints,
    WebKit::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);
  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get(),
          web_frame);
  return pc_factory_->CreatePeerConnection(
      ice_servers, constraints, pa_factory, observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
MediaStreamDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return pc_factory_->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
MediaStreamDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      pc_factory_->CreateAudioSource(constraints).get();
  return source;
}

scoped_refptr<webrtc::VideoSourceInterface>
MediaStreamDependencyFactory::CreateLocalVideoSource(
    int video_session_id,
    bool is_screencast,
    const webrtc::MediaConstraintsInterface* constraints) {
  RtcVideoCapturer* capturer = new RtcVideoCapturer(
      video_session_id, vc_manager_.get(), is_screencast);

  // The video source takes ownership of |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      pc_factory_->CreateVideoSource(capturer, constraints).get();
  return source;
}

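// Initializes the WebRtcAudioCapturer with the channel layout and sample rate
// of the selected capture device. Returns false for an invalid session id or
// if the capturer rejects the parameters.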
bool MediaStreamDependencyFactory::InitializeAudioSource(
    const StreamDeviceInfo& device_info) {
  DVLOG(1) << "MediaStreamDependencyFactory::InitializeAudioSource()";

  // TODO(henrika): the current design does not support a unique source
  // for each audio track.
  if (device_info.session_id <= 0)
    return false;

  // Initialize the source using audio parameters for the selected
  // capture device.
  WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
  // TODO(henrika): refactor content/public/common/media_stream_request.h
  // to allow dependency of media::ChannelLayout and avoid static_cast.
  if (!capturer->Initialize(
          static_cast<media::ChannelLayout>(device_info.device.channel_layout),
          device_info.device.sample_rate, device_info.session_id))
    return false;

  return true;
}

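// Connects a WebAudioCapturerSource to |source| so that WebAudio, rather than
// a microphone, drives the WebRtcAudioCapturer. All audio processing
// constraints are disabled for this source (see kWebAudioConstraints).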
bool MediaStreamDependencyFactory::CreateWebAudioSource(
    WebKit::WebMediaStreamSource* source) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
  DCHECK(GetWebRtcAudioDevice());

  // WebAudio needs the WebRtcAudioCapturer to be able to send its data
  // over a PeerConnection. The microphone source is not utilized in this
  // case; instead the WebRtcAudioCapturer is driving.
  WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
  if (!capturer)
    return false;

  // Set up the source and ensure that WebAudio is driving things instead of
  // a microphone.

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource(capturer));
  MediaStreamSourceExtraData* source_data =
      new content::MediaStreamSourceExtraData(webaudio_capturer_source);

  // Create a LocalAudioSource object which holds audio options.
  // Use audio constraints where all values are false, i.e., disable
  // echo cancellation, automatic gain control, noise suppression and
  // high-pass filter. SetLocalAudioSource() affects core audio parts in
  // third_party/libjingle.
  WebAudioConstraints webaudio_audio_constraints_all_false;
  source_data->SetLocalAudioSource(
      CreateLocalAudioSource(&webaudio_audio_constraints_all_false));
  source->setExtraData(source_data);

  // Replace the default source with the WebAudio source instead.
  source->addAudioConsumer(webaudio_capturer_source);

  return true;
}

scoped_refptr<webrtc::VideoTrackInterface>
MediaStreamDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return pc_factory_->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::AudioTrackInterface>
MediaStreamDependencyFactory::CreateLocalAudioTrack(
    const std::string& id,
    webrtc::AudioSourceInterface* source) {
  return pc_factory_->CreateAudioTrack(id, source).get();
}

webrtc::SessionDescriptionInterface*
MediaStreamDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
MediaStreamDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_;
}

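// Stops the WebRtcAudioCapturer if |description| is a local stream with audio
// tracks.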
void MediaStreamDependencyFactory::StopLocalAudioSource(
    const WebKit::WebMediaStream& description) {
  MediaStreamExtraData* extra_data = static_cast<MediaStreamExtraData*>(
      description.extraData());
  if (extra_data && extra_data->is_local() && extra_data->stream() &&
      !extra_data->stream()->GetAudioTracks().empty()) {
    if (GetWebRtcAudioDevice()) {
      scoped_refptr<WebRtcAudioCapturer> capturer =
          GetWebRtcAudioDevice()->capturer();
      if (capturer)
        capturer->Stop();
    }
  }
}

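// Runs on |chrome_worker_thread_|: wraps the thread's message loop as a jingle
// thread, publishes it through |*thread| and signals |event| so the caller can
// continue.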
void MediaStreamDependencyFactory::InitializeWorkerThread(
    talk_base::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_);
  event->Signal();
}

void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}

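// Lazily sets up everything CreatePeerConnectionFactory() needs: the signaling
// thread (the current thread), the libjingle worker thread, the IPC-based
// network manager and socket factory, and SSL. Returns false if any step
// fails.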
bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
  DCHECK(CalledOnValidThread());
  if (PeerConnectionFactoryCreated())
    return true;

  if (!signaling_thread_) {
    jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
    jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
    signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
    CHECK(signaling_thread_);
  }

  if (!worker_thread_) {
    if (!chrome_worker_thread_.IsRunning()) {
      if (!chrome_worker_thread_.Start()) {
        LOG(ERROR) << "Could not start worker thread";
        signaling_thread_ = NULL;
        return false;
      }
    }
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::InitializeWorkerThread,
        base::Unretained(this),
        &worker_thread_,
        &event));
    event.Wait();
    DCHECK(worker_thread_);
  }

  if (!network_manager_) {
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
        base::Unretained(this),
        &event));
    event.Wait();
  }

  if (!socket_factory_.get()) {
    socket_factory_.reset(
        new IpcPacketSocketFactory(p2p_socket_dispatcher_));
  }

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!talk_base::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    return false;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  if (!CreatePeerConnectionFactory()) {
    LOG(ERROR) << "Could not create PeerConnection factory";
    return false;
  }
  return true;
}

void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread where they
    // were created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish
      // before letting the function continue to avoid any potential race
      // issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

}  // namespace content