// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"

#include "base/bind.h"
#include "base/memory/aligned_memory.h"
#include "base/trace_event/trace_event.h"
#include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_pool.h"
#include "third_party/libjingle/source/talk/media/base/videoframefactory.h"
#include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "third_party/webrtc/common_video/interface/video_frame_buffer.h"
#include "third_party/webrtc/common_video/rotation.h"

namespace content {
namespace {

// Empty method used for keeping a reference to the original media::VideoFrame.
// The reference to |frame| is kept in the closure that calls this method.
void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) {
}

}  // anonymous namespace

// A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this
// class is to avoid a premature frame copy. A media::VideoFrame is injected
// with SetFrame, and converted into a cricket::VideoFrame with
// CreateAliasedFrame. SetFrame should be called before CreateAliasedFrame
// for every frame.
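// The expected call sequence, as driven by OnFrameCaptured() below: SetFrame()
// first, then SignalFrameCaptured() with GetCapturedFrame() (during which the
// base class is expected to call CreateAliasedFrame()), and finally
// ReleaseFrame() to drop the reference.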
class WebRtcVideoCapturerAdapter::MediaVideoFrameFactory
    : public cricket::VideoFrameFactory {
 public:
  void SetFrame(const scoped_refptr<media::VideoFrame>& frame,
                int64_t elapsed_time) {
    // Create a CapturedFrame that only contains header information, not the
    // actual pixel data.
    captured_frame_.width = frame->natural_size().width();
    captured_frame_.height = frame->natural_size().height();
    captured_frame_.elapsed_time = elapsed_time;
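    // |time_stamp| is filled in nanoseconds; media::VideoFrame reports
    // microseconds, hence the conversion below.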
    captured_frame_.time_stamp = frame->timestamp().InMicroseconds() *
                                 base::Time::kNanosecondsPerMicrosecond;
    captured_frame_.pixel_height = 1;
    captured_frame_.pixel_width = 1;
    captured_frame_.rotation = webrtc::kVideoRotation_0;
    captured_frame_.data = NULL;
    captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
    captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY);

    // Hold on to the frame until ReleaseFrame() so CreateAliasedFrame() can
    // wrap or scale it without copying here.
    frame_ = frame;
  }

  void ReleaseFrame() { frame_ = NULL; }

  const cricket::CapturedFrame* GetCapturedFrame() const {
    return &captured_frame_;
  }

  cricket::VideoFrame* CreateAliasedFrame(
      const cricket::CapturedFrame* input_frame,
      int cropped_input_width,
      int cropped_input_height,
      int output_width,
      int output_height) const override {
    // Check that captured_frame is actually our frame.
    DCHECK(input_frame == &captured_frame_);

    const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() *
                                 base::Time::kNanosecondsPerMicrosecond;

    // Return |frame_| directly if it is texture backed, because there is no
    // cropping support for texture yet. See http://crbug/503653.
    if (frame_->HasTextures()) {
      return new cricket::WebRtcVideoFrame(
          new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame_),
          captured_frame_.elapsed_time, timestamp_ns);
    }

    // Create a centered cropped visible rect that preserves aspect ratio for
    // the cropped natural size.
    gfx::Rect visible_rect = frame_->visible_rect();
    visible_rect.ClampToCenteredSize(gfx::Size(
        visible_rect.width() * cropped_input_width / input_frame->width,
        visible_rect.height() * cropped_input_height / input_frame->height));

    const gfx::Size output_size(output_width, output_height);
    scoped_refptr<media::VideoFrame> video_frame =
        media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size);
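    // The wrapped frame does not own the pixel data, so keep |frame_| alive
    // for as long as the wrapper exists by binding it into the destruction
    // observer below.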
    video_frame->AddDestructionObserver(
        base::Bind(&ReleaseOriginalFrame, frame_));

    // If no scaling is needed, return a wrapped version of |frame_| directly.
    if (video_frame->natural_size() == video_frame->visible_rect().size()) {
      return new cricket::WebRtcVideoFrame(
          new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame),
          captured_frame_.elapsed_time, timestamp_ns);
    }

    // We need to scale the frame before we hand it over to cricket.
    scoped_refptr<media::VideoFrame> scaled_frame =
        scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, output_size,
                                       gfx::Rect(output_size), output_size,
                                       frame_->timestamp());
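    // Scale the cropped visible region of |video_frame| into the pooled
    // |scaled_frame|, plane by plane; kFilterBilinear trades a little
    // sharpness for speed.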
    libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane),
                      video_frame->stride(media::VideoFrame::kYPlane),
                      video_frame->visible_data(media::VideoFrame::kUPlane),
                      video_frame->stride(media::VideoFrame::kUPlane),
                      video_frame->visible_data(media::VideoFrame::kVPlane),
                      video_frame->stride(media::VideoFrame::kVPlane),
                      video_frame->visible_rect().width(),
                      video_frame->visible_rect().height(),
                      scaled_frame->data(media::VideoFrame::kYPlane),
                      scaled_frame->stride(media::VideoFrame::kYPlane),
                      scaled_frame->data(media::VideoFrame::kUPlane),
                      scaled_frame->stride(media::VideoFrame::kUPlane),
                      scaled_frame->data(media::VideoFrame::kVPlane),
                      scaled_frame->stride(media::VideoFrame::kVPlane),
                      output_width, output_height, libyuv::kFilterBilinear);
    return new cricket::WebRtcVideoFrame(
        new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame),
        captured_frame_.elapsed_time, timestamp_ns);
  }

  cricket::VideoFrame* CreateAliasedFrame(
      const cricket::CapturedFrame* input_frame,
      int output_width,
      int output_height) const override {
    return CreateAliasedFrame(input_frame, input_frame->width,
                              input_frame->height, output_width,
                              output_height);
  }

 private:
  scoped_refptr<media::VideoFrame> frame_;
  cricket::CapturedFrame captured_frame_;
  // This is used only if scaling is needed.
  mutable media::VideoFramePool scaled_frame_pool_;
};

WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast)
    : is_screencast_(is_screencast),
      running_(false),
      first_frame_timestamp_(media::kNoTimestamp()) {
  thread_checker_.DetachFromThread();
  // The base class takes ownership of the frame factory.
  set_frame_factory(new MediaVideoFrameFactory);
}

WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() {
  DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor";
}

cricket::CaptureState WebRtcVideoCapturerAdapter::Start(
    const cricket::VideoFormat& capture_format) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!running_);
  DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width
           << " h = " << capture_format.height;

  running_ = true;
  return cricket::CS_RUNNING;
}

void WebRtcVideoCapturerAdapter::Stop() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop ";
  DCHECK(running_);
  running_ = false;
  SetCaptureFormat(NULL);
  SignalStateChange(this, cricket::CS_STOPPED);
}

bool WebRtcVideoCapturerAdapter::IsRunning() {
  DCHECK(thread_checker_.CalledOnValidThread());
  return running_;
}

bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
    std::vector<uint32>* fourccs) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!fourccs || fourccs->empty());
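  // Only I420 is advertised here; OnFrameCaptured() lists the formats that
  // are actually accepted from the source.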
  if (fourccs)
    fourccs->push_back(cricket::FOURCC_I420);
  return fourccs != NULL;
}

bool WebRtcVideoCapturerAdapter::IsScreencast() const {
  return is_screencast_;
}

bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
    const cricket::VideoFormat& desired,
    cricket::VideoFormat* best_format) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << " GetBestCaptureFormat:: "
           << " w = " << desired.width
           << " h = " << desired.height;

  // Capability enumeration is done in MediaStreamVideoSource. The adapter can
  // just use what is provided.
  // Use the desired format as the best format.
  best_format->width = desired.width;
  best_format->height = desired.height;
  best_format->fourcc = cricket::FOURCC_I420;
  best_format->interval = desired.interval;
  return true;
}

void WebRtcVideoCapturerAdapter::OnFrameCaptured(
    const scoped_refptr<media::VideoFrame>& frame) {
  DCHECK(thread_checker_.CalledOnValidThread());
  TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured");
  if (!((frame->IsMappable() &&
         (frame->format() == media::PIXEL_FORMAT_I420 ||
          frame->format() == media::PIXEL_FORMAT_YV12)) ||
        frame->HasTextures())) {
    // Since connecting sources and sinks do not check the format, we need to
    // just ignore formats that we can not handle.
    NOTREACHED();
    return;
  }

  if (first_frame_timestamp_ == media::kNoTimestamp())
    first_frame_timestamp_ = frame->timestamp();
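
  // The elapsed time handed to the frame factory is in nanoseconds, measured
  // relative to the first captured frame.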
  const int64 elapsed_time =
      (frame->timestamp() - first_frame_timestamp_).InMicroseconds() *
      base::Time::kNanosecondsPerMicrosecond;

  // Inject the frame via the VideoFrameFactory of base class.
  MediaVideoFrameFactory* media_video_frame_factory =
      reinterpret_cast<MediaVideoFrameFactory*>(frame_factory());
  media_video_frame_factory->SetFrame(frame, elapsed_time);

  // This signals to libJingle that a new VideoFrame is available.
  SignalFrameCaptured(this, media_video_frame_factory->GetCapturedFrame());

  media_video_frame_factory->ReleaseFrame();  // Release the frame ASAP.
}

}  // namespace content