content/renderer/media/user_media_client_impl.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/user_media_client_impl.h"

#include <utility>

#include "base/hash.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/thread_task_runner_handle.h"
#include "content/public/renderer/render_frame.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/media_stream_video_capturer_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_tracker.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/render_thread_impl.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaDeviceInfo.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrackSourcesRequest.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"

namespace content {
namespace {

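// Copies the mandatory and optional constraints of |constraints| into the
// corresponding StreamOptions constraint lists.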
void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                           StreamOptions::Constraints* mandatory,
                           StreamOptions::Constraints* optional) {
  blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
  constraints.getMandatoryConstraints(mandatory_constraints);
  for (size_t i = 0; i < mandatory_constraints.size(); i++) {
    mandatory->push_back(StreamOptions::Constraint(
        mandatory_constraints[i].m_name.utf8(),
        mandatory_constraints[i].m_value.utf8()));
  }

  blink::WebVector<blink::WebMediaConstraint> optional_constraints;
  constraints.getOptionalConstraints(optional_constraints);
  for (size_t i = 0; i < optional_constraints.size(); i++) {
    optional->push_back(StreamOptions::Constraint(
        optional_constraints[i].m_name.utf8(),
        optional_constraints[i].m_value.utf8()));
  }
}

static int g_next_request_id = 0;

}  // namespace

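// Bookkeeping for a single enumerateDevices()/getSources() call. It tracks
// the per-kind enumeration request ids handed to MediaStreamDispatcher and
// collects the device lists as they are returned.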
struct UserMediaClientImpl::MediaDevicesRequestInfo {
  MediaDevicesRequestInfo(const blink::WebMediaDevicesRequest& request,
                          int audio_input_request_id,
                          int video_input_request_id,
                          int audio_output_request_id)
      : media_devices_request(request),
        audio_input_request_id(audio_input_request_id),
        video_input_request_id(video_input_request_id),
        audio_output_request_id(audio_output_request_id),
        has_audio_input_returned(false),
        has_video_input_returned(false),
        has_audio_output_returned(false) {}

  MediaDevicesRequestInfo(
      const blink::WebMediaStreamTrackSourcesRequest& request,
      int audio_input_request_id,
      int video_input_request_id)
      : sources_request(request),
        audio_input_request_id(audio_input_request_id),
        video_input_request_id(video_input_request_id),
        audio_output_request_id(-1),
        has_audio_input_returned(false),
        has_video_input_returned(false),
        has_audio_output_returned(false) {}

  bool IsSourcesRequest() {
    // We can't check isNull() on |media_devices_request| and
    // |sources_request| because in unit tests they will always be null.
    return audio_output_request_id == -1;
  }

  blink::WebMediaDevicesRequest media_devices_request;
  blink::WebMediaStreamTrackSourcesRequest sources_request;
  int audio_input_request_id;
  int video_input_request_id;
  int audio_output_request_id;
  bool has_audio_input_returned;
  bool has_video_input_returned;
  bool has_audio_output_returned;
  StreamDeviceInfoArray audio_input_devices;
  StreamDeviceInfoArray video_input_devices;
  StreamDeviceInfoArray audio_output_devices;
};

UserMediaClientImpl::UserMediaClientImpl(
    RenderFrame* render_frame,
    PeerConnectionDependencyFactory* dependency_factory,
    scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher)
    : RenderFrameObserver(render_frame),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher.Pass()),
      weak_factory_(this) {
  DCHECK(dependency_factory_);
  DCHECK(media_stream_dispatcher_.get());
}

UserMediaClientImpl::~UserMediaClientImpl() {
  // Force-close all outstanding user media requests and local sources here,
  // before the outstanding WeakPtrs are invalidated, to ensure a clean
  // shutdown.
  FrameWillClose();
}

void UserMediaClientImpl::requestUserMedia(
    const blink::WebUserMediaRequest& user_media_request) {
  // Save histogram data so we can see how much GetUserMedia is used.
  // The histogram counts the number of calls to the JS API
  // webGetUserMedia.
  UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
  DCHECK(CalledOnValidThread());

  if (RenderThreadImpl::current()) {
    RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
        user_media_request);
  }

  int request_id = g_next_request_id++;
  StreamOptions options;
  GURL security_origin;
  bool enable_automatic_output_device_selection = false;

  // |user_media_request| can't be mocked, so in order to test at all we check
  // whether it isNull().
  if (user_media_request.isNull()) {
    // We are in a test.
    options.audio_requested = true;
    options.video_requested = true;
  } else {
    if (user_media_request.audio()) {
      options.audio_requested = true;
      CopyStreamConstraints(user_media_request.audioConstraints(),
                            &options.mandatory_audio,
                            &options.optional_audio);

      // Check if this input device should be used to select a matching output
      // device for audio rendering.
      std::string enable;
      if (options.GetFirstAudioConstraintByName(
              kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
          base::LowerCaseEqualsASCII(enable, "true")) {
        enable_automatic_output_device_selection = true;
      }
    }
    if (user_media_request.video()) {
      options.video_requested = true;
      CopyStreamConstraints(user_media_request.videoConstraints(),
                            &options.mandatory_video,
                            &options.optional_video);
    }

    security_origin = GURL(user_media_request.securityOrigin().toString());
    DCHECK(render_frame()->GetWebFrame() ==
           static_cast<blink::WebFrame*>(
               user_media_request.ownerDocument().frame()));
  }

  DVLOG(1) << "UserMediaClientImpl::requestUserMedia(" << request_id << ", [ "
           << "audio=" << (options.audio_requested)
           << " select associated sink: "
           << enable_automatic_output_device_selection
           << ", video=" << (options.video_requested) << " ], "
           << security_origin.spec() << ")";

  std::string audio_device_id;
  bool mandatory_audio;
  options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
                                        &audio_device_id, &mandatory_audio);
  std::string video_device_id;
  bool mandatory_video;
  options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
                                        &video_device_id, &mandatory_video);

  WebRtcLogMessage(base::StringPrintf(
      "MSI::requestUserMedia. request_id=%d"
      ", audio source id=%s mandatory=%s"
      ", video source id=%s mandatory=%s",
      request_id,
      audio_device_id.c_str(),
      mandatory_audio ? "true" : "false",
      video_device_id.c_str(),
      mandatory_video ? "true" : "false"));

  user_media_requests_.push_back(
      new UserMediaRequestInfo(request_id, user_media_request,
                               enable_automatic_output_device_selection));

  media_stream_dispatcher_->GenerateStream(
      request_id,
      weak_factory_.GetWeakPtr(),
      options,
      security_origin);
}

void UserMediaClientImpl::cancelUserMediaRequest(
    const blink::WebUserMediaRequest& user_media_request) {
  DCHECK(CalledOnValidThread());
  UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
  if (request) {
    // We can't abort the stream generation process.
    // Instead, erase the request. Once the stream is generated we will stop
    // the stream if the request no longer exists.
    LogUserMediaRequestWithNoResult(MEDIA_STREAM_REQUEST_EXPLICITLY_CANCELLED);
    DeleteUserMediaRequestInfo(request);
  }
}

void UserMediaClientImpl::requestMediaDevices(
    const blink::WebMediaDevicesRequest& media_devices_request) {
  UpdateWebRTCMethodCount(WEBKIT_GET_MEDIA_DEVICES);
  DCHECK(CalledOnValidThread());

  int audio_input_request_id = g_next_request_id++;
  int video_input_request_id = g_next_request_id++;
  int audio_output_request_id = g_next_request_id++;

  // |media_devices_request| can't be mocked, so in tests it will be empty
  // (the underlying pointer is null). In order to use this function in a
  // test we need to check if it isNull().
  GURL security_origin;
  if (!media_devices_request.isNull())
    security_origin = GURL(media_devices_request.securityOrigin().toString());

  DVLOG(1) << "UserMediaClientImpl::requestMediaDevices("
           << audio_input_request_id
           << ", " << video_input_request_id << ", " << audio_output_request_id
           << ", " << security_origin.spec() << ")";

  media_devices_requests_.push_back(new MediaDevicesRequestInfo(
      media_devices_request,
      audio_input_request_id,
      video_input_request_id,
      audio_output_request_id));

  media_stream_dispatcher_->EnumerateDevices(
      audio_input_request_id,
      weak_factory_.GetWeakPtr(),
      MEDIA_DEVICE_AUDIO_CAPTURE,
      security_origin);

  media_stream_dispatcher_->EnumerateDevices(
      video_input_request_id,
      weak_factory_.GetWeakPtr(),
      MEDIA_DEVICE_VIDEO_CAPTURE,
      security_origin);

  media_stream_dispatcher_->EnumerateDevices(
      audio_output_request_id,
      weak_factory_.GetWeakPtr(),
      MEDIA_DEVICE_AUDIO_OUTPUT,
      security_origin);
}

void UserMediaClientImpl::cancelMediaDevicesRequest(
    const blink::WebMediaDevicesRequest& media_devices_request) {
  DCHECK(CalledOnValidThread());
  MediaDevicesRequestInfo* request =
      FindMediaDevicesRequestInfo(media_devices_request);
  if (!request)
    return;
  CancelAndDeleteMediaDevicesRequest(request);
}

void UserMediaClientImpl::requestSources(
    const blink::WebMediaStreamTrackSourcesRequest& sources_request) {
  // We don't call UpdateWebRTCMethodCount() here to track the API count in
  // UMA stats. This is instead counted in MediaStreamTrack::getSources in
  // blink.
  DCHECK(CalledOnValidThread());

  int audio_input_request_id = g_next_request_id++;
  int video_input_request_id = g_next_request_id++;

  // |sources_request| can't be mocked, so in tests it will be empty (the
  // underlying pointer is null). In order to use this function in a test we
  // need to check if it isNull().
  GURL security_origin;
  if (!sources_request.isNull())
    security_origin = GURL(sources_request.origin().utf8());

  DVLOG(1) << "UserMediaClientImpl::requestSources("
           << audio_input_request_id
           << ", " << video_input_request_id
           << ", " << security_origin.spec() << ")";

  media_devices_requests_.push_back(new MediaDevicesRequestInfo(
      sources_request,
      audio_input_request_id,
      video_input_request_id));

  media_stream_dispatcher_->EnumerateDevices(
      audio_input_request_id,
      weak_factory_.GetWeakPtr(),
      MEDIA_DEVICE_AUDIO_CAPTURE,
      security_origin);

  media_stream_dispatcher_->EnumerateDevices(
      video_input_request_id,
      weak_factory_.GetWeakPtr(),
      MEDIA_DEVICE_VIDEO_CAPTURE,
      security_origin);
}

// Callback from MediaStreamDispatcher.
// The requested stream has been generated by the MediaStreamDispatcher.
void UserMediaClientImpl::OnStreamGenerated(
    int request_id,
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "UserMediaClientImpl::OnStreamGenerated stream:" << label;

  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    DVLOG(1) << "Request ID not found";
    OnStreamGeneratedForCancelledRequest(audio_array, video_array);
    return;
  }
  request_info->generated = true;

  // WebUserMediaRequest doesn't have an implementation in unit tests.
  // Therefore we need to check for isNull here and initialize the
  // constraints.
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebMediaConstraints audio_constraints;
  blink::WebMediaConstraints video_constraints;
  if (request->isNull()) {
    audio_constraints.initialize();
    video_constraints.initialize();
  } else {
    audio_constraints = request->audioConstraints();
    video_constraints = request->videoConstraints();
  }

  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
      audio_array.size());
  CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
                    request_info);

  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
      video_array.size());
  CreateVideoTracks(video_array, video_constraints, &video_track_vector,
                    request_info);

  blink::WebString webkit_id = base::UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);

  web_stream->initialize(webkit_id, audio_track_vector,
                         video_track_vector);
  web_stream->setExtraData(new MediaStream(*web_stream));

  // Wait for the tracks to be started successfully or to fail.
  request_info->CallbackOnTracksStarted(
      base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted,
                 weak_factory_.GetWeakPtr()));
}

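// Stops the devices of a stream that was generated after its request had
// already been cancelled or deleted.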
void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest(
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  // Only stop the device if the device is not used in another MediaStream.
  for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
       device_it != audio_array.end(); ++device_it) {
    if (!FindLocalSource(*device_it))
      media_stream_dispatcher_->StopStreamDevice(*device_it);
  }

  for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
       device_it != video_array.end(); ++device_it) {
    if (!FindLocalSource(*device_it))
      media_stream_dispatcher_->StopStreamDevice(*device_it);
  }
}

void UserMediaClientImpl::FinalizeEnumerateDevices(
    MediaDevicesRequestInfo* request) {
  // All devices are ready for copying. We use a hashed audio output device id
  // as the group id for input and output audio devices. If an input device
  // doesn't have an associated output device, we use the input device's own
  // id. We don't support group ids for video devices, so that field is left
  // empty.
  blink::WebVector<blink::WebMediaDeviceInfo>
      devices(request->audio_input_devices.size() +
              request->video_input_devices.size() +
              request->audio_output_devices.size());
  for (size_t i = 0; i < request->audio_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE);
    std::string group_id = base::UintToString(base::Hash(
        !device.matched_output_device_id.empty() ?
            device.matched_output_device_id :
            device.id));
    devices[i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(group_id));
  }
  size_t offset = request->audio_input_devices.size();
  for (size_t i = 0; i < request->video_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->video_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindVideoInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString());
  }
  offset += request->video_input_devices.size();
  for (size_t i = 0; i < request->audio_output_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_output_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_OUTPUT);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioOutput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(base::UintToString(base::Hash(device.id))));
  }

  EnumerateDevicesSucceded(&request->media_devices_request, devices);
}

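// Builds the blink::WebSourceInfo list for a MediaStreamTrack.getSources()
// request and completes the request.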
void UserMediaClientImpl::FinalizeEnumerateSources(
    MediaDevicesRequestInfo* request) {
  blink::WebVector<blink::WebSourceInfo>
      sources(request->audio_input_devices.size() +
              request->video_input_devices.size());
  for (size_t i = 0; i < request->audio_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE);
    std::string group_id = base::UintToString(base::Hash(
        !device.matched_output_device_id.empty() ?
            device.matched_output_device_id :
            device.id));
    sources[i].initialize(blink::WebString::fromUTF8(device.id),
                          blink::WebSourceInfo::SourceKindAudio,
                          blink::WebString::fromUTF8(device.name),
                          blink::WebSourceInfo::VideoFacingModeNone);
  }
  size_t offset = request->audio_input_devices.size();
  for (size_t i = 0; i < request->video_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->video_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE);
    blink::WebSourceInfo::VideoFacingMode video_facing;
    switch (device.video_facing) {
      case MEDIA_VIDEO_FACING_USER:
        video_facing = blink::WebSourceInfo::VideoFacingModeUser;
        break;
      case MEDIA_VIDEO_FACING_ENVIRONMENT:
        video_facing = blink::WebSourceInfo::VideoFacingModeEnvironment;
        break;
      default:
        video_facing = blink::WebSourceInfo::VideoFacingModeNone;
    }
    sources[offset + i].initialize(blink::WebString::fromUTF8(device.id),
                                   blink::WebSourceInfo::SourceKindVideo,
                                   blink::WebString::fromUTF8(device.name),
                                   video_facing);
  }

  EnumerateSourcesSucceded(&request->sources_request, sources);
}

// Callback from MediaStreamDispatcher.
// The requested stream failed to be generated.
void UserMediaClientImpl::OnStreamGenerationFailed(
    int request_id,
    MediaStreamRequestResult result) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "UserMediaClientImpl::OnStreamGenerationFailed("
           << request_id << ")";
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    DVLOG(1) << "Request ID not found";
    return;
  }

  GetUserMediaRequestFailed(request_info->request, result, "");
  DeleteUserMediaRequestInfo(request_info);
}

// Callback from MediaStreamDispatcher.
// The browser process has stopped a device used by a MediaStream.
void UserMediaClientImpl::OnDeviceStopped(
    const std::string& label,
    const StreamDeviceInfo& device_info) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "UserMediaClientImpl::OnDeviceStopped("
           << "{device_id = " << device_info.device.id << "})";

  const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
  if (!source_ptr) {
    // This happens if the same device is used in several guM requests, or if
    // a user happens to stop a track from JS at the same time as the
    // underlying media device is unplugged from the system.
    return;
  }
  // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
  // object is valid during the cleanup.
  blink::WebMediaStreamSource source(*source_ptr);
  StopLocalSource(source, false);

  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->id() == source.id()) {
      local_sources_.erase(device_it);
      break;
    }
  }
}

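// Initializes |webkit_source| for |device|, reusing an existing local source
// if the device is already in use; otherwise a new audio or video source
// object is created and added to |local_sources_|.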
void UserMediaClientImpl::InitializeSourceObject(
    const StreamDeviceInfo& device,
    blink::WebMediaStreamSource::Type type,
    const blink::WebMediaConstraints& constraints,
    blink::WebMediaStreamSource* webkit_source) {
  const blink::WebMediaStreamSource* existing_source =
      FindLocalSource(device);
  if (existing_source) {
    *webkit_source = *existing_source;
    DVLOG(1) << "Source already exists. Reusing source with id "
             << webkit_source->id().utf8();
    return;
  }

  webkit_source->initialize(
      base::UTF8ToUTF16(device.device.id),
      type,
      base::UTF8ToUTF16(device.device.name),
      false /* remote */, true /* readonly */);

  DVLOG(1) << "Initialize source object :"
           << "id = " << webkit_source->id().utf8()
           << ", name = " << webkit_source->name().utf8();

  if (type == blink::WebMediaStreamSource::TypeVideo) {
    webkit_source->setExtraData(
        CreateVideoSource(
            device,
            base::Bind(&UserMediaClientImpl::OnLocalSourceStopped,
                       weak_factory_.GetWeakPtr())));
  } else {
    DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
    MediaStreamAudioSource* audio_source(
        new MediaStreamAudioSource(
            RenderFrameObserver::routing_id(),
            device,
            base::Bind(&UserMediaClientImpl::OnLocalSourceStopped,
                       weak_factory_.GetWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(audio_source);
  }
  local_sources_.push_back(*webkit_source);
}

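// Creates a MediaStreamVideoCapturerSource for |device| that reports back to
// |stop_callback| when the source is stopped.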
MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource(
    const StreamDeviceInfo& device,
    const MediaStreamSource::SourceStoppedCallback& stop_callback) {
  content::MediaStreamVideoCapturerSource* ret =
      new content::MediaStreamVideoCapturerSource(
          stop_callback,
          make_scoped_ptr(new VideoCapturerDelegate(device)));
  ret->SetDeviceInfo(device);
  return ret;
}

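// Creates one video track per device in |devices| and starts it with the
// given constraints.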
void UserMediaClientImpl::CreateVideoTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  for (size_t i = 0; i < devices.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(devices[i],
                           blink::WebMediaStreamSource::TypeVideo,
                           constraints,
                           &webkit_source);
    (*webkit_tracks)[i] =
        request->CreateAndStartVideoTrack(webkit_source, constraints);
  }
}

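// Creates one audio track per device in |devices|. Matched output device
// parameters are stripped unless the request asked for automatic output
// device selection.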
void UserMediaClientImpl::CreateAudioTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  // Log the device names for this request.
  for (StreamDeviceInfoArray::const_iterator it = devices.begin();
       it != devices.end(); ++it) {
    WebRtcLogMessage(base::StringPrintf(
        "Generated media stream for request id %d contains audio device name"
        " \"%s\"",
        request->request_id,
        it->device.name.c_str()));
  }

  StreamDeviceInfoArray overridden_audio_array = devices;
  if (!request->enable_automatic_output_device_selection) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device parameters must
    // be removed.
    for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
         it != overridden_audio_array.end(); ++it) {
      it->device.matched_output_device_id = "";
      it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
    }
  }

  for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(overridden_audio_array[i],
                           blink::WebMediaStreamSource::TypeAudio,
                           constraints,
                           &webkit_source);
    (*webkit_tracks)[i].initialize(webkit_source);
    request->StartAudioTrack((*webkit_tracks)[i], constraints);
  }
}

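// Called when all native tracks of a request have either started or failed;
// completes the getUserMedia request accordingly.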
void UserMediaClientImpl::OnCreateNativeTracksCompleted(
    UserMediaRequestInfo* request,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksCompleted("
           << "{request_id = " << request->request_id << "} "
           << "{result = " << result << "})";

  if (result == content::MEDIA_DEVICE_OK)
    GetUserMediaRequestSucceeded(request->web_stream, request->request);
  else
    GetUserMediaRequestFailed(request->request, result, result_name);

  DeleteUserMediaRequestInfo(request);
}

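// Callback from MediaStreamDispatcher with the enumeration result for one
// device type. The request is finalized once all expected device types have
// been returned.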
void UserMediaClientImpl::OnDevicesEnumerated(
    int request_id,
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "UserMediaClientImpl::OnDevicesEnumerated(" << request_id << ")";

  MediaDevicesRequestInfo* request = FindMediaDevicesRequestInfo(request_id);
  DCHECK(request);

  if (request_id == request->audio_input_request_id) {
    request->has_audio_input_returned = true;
    DCHECK(request->audio_input_devices.empty());
    request->audio_input_devices = device_array;
  } else if (request_id == request->video_input_request_id) {
    request->has_video_input_returned = true;
    DCHECK(request->video_input_devices.empty());
    request->video_input_devices = device_array;
  } else {
    DCHECK_EQ(request->audio_output_request_id, request_id);
    request->has_audio_output_returned = true;
    DCHECK(request->audio_output_devices.empty());
    request->audio_output_devices = device_array;
  }

  if (!request->has_audio_input_returned ||
      !request->has_video_input_returned ||
      (!request->IsSourcesRequest() && !request->has_audio_output_returned)) {
    // Wait for the rest of the devices to complete.
    return;
  }

  if (request->IsSourcesRequest())
    FinalizeEnumerateSources(request);
  else
    FinalizeEnumerateDevices(request);

  CancelAndDeleteMediaDevicesRequest(request);
}

void UserMediaClientImpl::OnDeviceOpened(
    int request_id,
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "UserMediaClientImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
  NOTIMPLEMENTED();
}

void UserMediaClientImpl::OnDeviceOpenFailed(int request_id) {
  DVLOG(1) << "UserMediaClientImpl::OnDeviceOpenFailed("
           << request_id << ")";
  NOTIMPLEMENTED();
}

void UserMediaClientImpl::GetUserMediaRequestSucceeded(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest request_info) {
  // Completing the getUserMedia request can lead to the RenderFrame and the
  // UserMediaClientImpl being destroyed if the JavaScript code requests the
  // frame to be destroyed within the scope of the callback. Therefore,
  // post a task to complete the request with a clean stack.
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      base::Bind(&UserMediaClientImpl::DelayedGetUserMediaRequestSucceeded,
                 weak_factory_.GetWeakPtr(), stream, request_info));
}

void UserMediaClientImpl::DelayedGetUserMediaRequestSucceeded(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest request_info) {
  DVLOG(1) << "UserMediaClientImpl::DelayedGetUserMediaRequestSucceeded";
  LogUserMediaRequestResult(MEDIA_DEVICE_OK);
  request_info.requestSucceeded(stream);
}

void UserMediaClientImpl::GetUserMediaRequestFailed(
    blink::WebUserMediaRequest request_info,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  // Completing the getUserMedia request can lead to the RenderFrame and the
  // UserMediaClientImpl being destroyed if the JavaScript code requests the
  // frame to be destroyed within the scope of the callback. Therefore,
  // post a task to complete the request with a clean stack.
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      base::Bind(&UserMediaClientImpl::DelayedGetUserMediaRequestFailed,
                 weak_factory_.GetWeakPtr(), request_info, result,
                 result_name));
}

void UserMediaClientImpl::DelayedGetUserMediaRequestFailed(
    blink::WebUserMediaRequest request_info,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  LogUserMediaRequestResult(result);
  switch (result) {
    case MEDIA_DEVICE_OK:
    case NUM_MEDIA_REQUEST_RESULTS:
      NOTREACHED();
      return;
    case MEDIA_DEVICE_PERMISSION_DENIED:
      request_info.requestDenied();
      return;
    case MEDIA_DEVICE_PERMISSION_DISMISSED:
      request_info.requestFailedUASpecific("PermissionDismissedError");
      return;
    case MEDIA_DEVICE_INVALID_STATE:
      request_info.requestFailedUASpecific("InvalidStateError");
      return;
    case MEDIA_DEVICE_NO_HARDWARE:
      request_info.requestFailedUASpecific("DevicesNotFoundError");
      return;
    case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN:
      request_info.requestFailedUASpecific("InvalidSecurityOriginError");
      return;
    case MEDIA_DEVICE_TAB_CAPTURE_FAILURE:
      request_info.requestFailedUASpecific("TabCaptureError");
      return;
    case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE:
      request_info.requestFailedUASpecific("ScreenCaptureError");
      return;
    case MEDIA_DEVICE_CAPTURE_FAILURE:
      request_info.requestFailedUASpecific("DeviceCaptureError");
      return;
    case MEDIA_DEVICE_CONSTRAINT_NOT_SATISFIED:
      request_info.requestFailedConstraint(result_name);
      return;
    case MEDIA_DEVICE_TRACK_START_FAILURE:
      request_info.requestFailedUASpecific("TrackStartError");
      return;
    case MEDIA_DEVICE_NOT_SUPPORTED:
      request_info.requestFailedUASpecific("MediaDeviceNotSupported");
      return;
    case MEDIA_DEVICE_FAILED_DUE_TO_SHUTDOWN:
      request_info.requestFailedUASpecific("MediaDeviceFailedDueToShutdown");
      return;
  }
  NOTREACHED();
  request_info.requestFailed();
}

void UserMediaClientImpl::EnumerateDevicesSucceded(
    blink::WebMediaDevicesRequest* request,
    blink::WebVector<blink::WebMediaDeviceInfo>& devices) {
  request->requestSucceeded(devices);
}

void UserMediaClientImpl::EnumerateSourcesSucceded(
    blink::WebMediaStreamTrackSourcesRequest* request,
    blink::WebVector<blink::WebSourceInfo>& sources) {
  request->requestSucceeded(sources);
}

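// Returns the local source that matches |device| on device id, type and
// session id, or NULL if no such source exists.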
const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource(
    const StreamDeviceInfo& device) const {
  for (LocalStreamSources::const_iterator it = local_sources_.begin();
       it != local_sources_.end(); ++it) {
    MediaStreamSource* const source =
        static_cast<MediaStreamSource*>(it->extraData());
    const StreamDeviceInfo& active_device = source->device_info();
    if (active_device.device.id == device.device.id &&
        active_device.device.type == device.device.type &&
        active_device.session_id == device.session_id) {
      return &(*it);
    }
  }
  return NULL;
}

UserMediaClientImpl::UserMediaRequestInfo*
UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request_id == request_id)
      return (*it);
  }
  return NULL;
}

UserMediaClientImpl::UserMediaRequestInfo*
UserMediaClientImpl::FindUserMediaRequestInfo(
    const blink::WebUserMediaRequest& request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request == request)
      return (*it);
  }
  return NULL;
}

void UserMediaClientImpl::DeleteUserMediaRequestInfo(
    UserMediaRequestInfo* request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it) == request) {
      user_media_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

void UserMediaClientImpl::DeleteAllUserMediaRequests() {
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    DVLOG(1) << "UserMediaClientImpl@" << this
             << "::DeleteAllUserMediaRequests: "
             << "Cancel user media request " << (*request_it)->request_id;
    // If the request is not generated, it means that a request
    // has been sent to the MediaStreamDispatcher to generate a stream
    // but MediaStreamDispatcher has not yet responded and we need to cancel
    // the request.
    if (!(*request_it)->generated) {
      DCHECK(!(*request_it)->HasPendingSources());
      media_stream_dispatcher_->CancelGenerateStream(
          (*request_it)->request_id, weak_factory_.GetWeakPtr());
      LogUserMediaRequestWithNoResult(MEDIA_STREAM_REQUEST_NOT_GENERATED);
    } else {
      DCHECK((*request_it)->HasPendingSources());
      LogUserMediaRequestWithNoResult(
          MEDIA_STREAM_REQUEST_PENDING_MEDIA_TRACKS);
    }
    request_it = user_media_requests_.erase(request_it);
  }
}

UserMediaClientImpl::MediaDevicesRequestInfo*
UserMediaClientImpl::FindMediaDevicesRequestInfo(
    int request_id) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it)->audio_input_request_id == request_id ||
        (*it)->video_input_request_id == request_id ||
        (*it)->audio_output_request_id == request_id) {
      return (*it);
    }
  }
  return NULL;
}

UserMediaClientImpl::MediaDevicesRequestInfo*
UserMediaClientImpl::FindMediaDevicesRequestInfo(
    const blink::WebMediaDevicesRequest& request) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it)->media_devices_request == request)
      return (*it);
  }
  return NULL;
}

void UserMediaClientImpl::CancelAndDeleteMediaDevicesRequest(
    MediaDevicesRequestInfo* request) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it) == request) {
      // Cancel device enumeration.
      media_stream_dispatcher_->StopEnumerateDevices(
          request->audio_input_request_id, weak_factory_.GetWeakPtr());
      media_stream_dispatcher_->StopEnumerateDevices(
          request->video_input_request_id, weak_factory_.GetWeakPtr());
      media_stream_dispatcher_->StopEnumerateDevices(
          request->audio_output_request_id, weak_factory_.GetWeakPtr());

      media_devices_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

void UserMediaClientImpl::FrameWillClose() {
  // Cancel all outstanding UserMediaRequests.
  DeleteAllUserMediaRequests();

  // Loop through all current local sources and stop them.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    StopLocalSource(*sources_it, true);
    sources_it = local_sources_.erase(sources_it);
  }
}

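// Called when a local source has been stopped (for example a track stopped
// from JavaScript); removes it from |local_sources_| and releases the
// underlying device.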
void UserMediaClientImpl::OnLocalSourceStopped(
    const blink::WebMediaStreamSource& source) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "UserMediaClientImpl::OnLocalSourceStopped";

  bool device_found = false;
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->id() == source.id()) {
      device_found = true;
      local_sources_.erase(device_it);
      break;
    }
  }
  CHECK(device_found);

  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
}

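// Stops |source| and, if |notify_dispatcher| is true, tells the browser
// process to release the underlying device.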
void UserMediaClientImpl::StopLocalSource(
    const blink::WebMediaStreamSource& source,
    bool notify_dispatcher) {
  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  DVLOG(1) << "UserMediaClientImpl::StopLocalSource("
           << "{device_id = " << source_impl->device_info().device.id << "})";

  if (notify_dispatcher)
    media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());

  source_impl->ResetSourceStoppedCallback();
  source_impl->StopSource();
}

UserMediaClientImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      request(request),
      request_result_(MEDIA_DEVICE_OK),
      request_result_name_("") {
}

UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
}

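// Connects |track| to its audio source and registers the source so that the
// request can wait for the track-started callback.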
void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack(
    const blink::WebMediaStreamTrack& track,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* native_source =
      static_cast<MediaStreamAudioSource*>(track.source().extraData());
  DCHECK(native_source);

  sources_.push_back(track.source());
  sources_waiting_for_callback_.push_back(native_source);
  native_source->AddTrack(
      track, constraints, base::Bind(
          &UserMediaClientImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()));
}

blink::WebMediaStreamTrack
UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
    const blink::WebMediaStreamSource& source,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  sources_.push_back(source);
  sources_waiting_for_callback_.push_back(native_source);
  return MediaStreamVideoTrack::CreateVideoTrack(
      native_source, constraints, base::Bind(
          &UserMediaClientImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()),
      true);
}

void UserMediaClientImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = callback;
  CheckAllTracksStarted();
}

void UserMediaClientImpl::UserMediaRequestInfo::OnTrackStarted(
    MediaStreamSource* source,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  DVLOG(1) << "OnTrackStarted result " << result;
  std::vector<MediaStreamSource*>::iterator it =
      std::find(sources_waiting_for_callback_.begin(),
                sources_waiting_for_callback_.end(),
                source);
  DCHECK(it != sources_waiting_for_callback_.end());
  sources_waiting_for_callback_.erase(it);
  // All tracks must be started successfully. Otherwise the request is a
  // failure.
  if (result != MEDIA_DEVICE_OK) {
    request_result_ = result;
    request_result_name_ = result_name;
  }

  CheckAllTracksStarted();
}

void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
  if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) {
    ready_callback_.Run(this, request_result_, request_result_name_);
  }
}

bool UserMediaClientImpl::UserMediaRequestInfo::IsSourceUsed(
    const blink::WebMediaStreamSource& source) const {
  for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
           sources_.begin();
       source_it != sources_.end(); ++source_it) {
    if (source_it->id() == source.id())
      return true;
  }
  return false;
}

void UserMediaClientImpl::UserMediaRequestInfo::RemoveSource(
    const blink::WebMediaStreamSource& source) {
  for (std::vector<blink::WebMediaStreamSource>::iterator it =
           sources_.begin();
       it != sources_.end(); ++it) {
    if (source.id() == it->id()) {
      sources_.erase(it);
      return;
    }
  }
}

bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const {
  return !sources_waiting_for_callback_.empty();
}

}  // namespace content