// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"

#include "base/memory/aligned_memory.h"
#include "base/trace_event/trace_event.h"
#include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_pool.h"
#include "third_party/libjingle/source/talk/media/base/videoframefactory.h"
#include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "third_party/webrtc/common_video/interface/video_frame_buffer.h"
#include "third_party/webrtc/common_video/rotation.h"

namespace content {
namespace {

// Empty method used for keeping a reference to the original media::VideoFrame.
// The reference to |frame| is kept in the closure that calls this method.
void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) {
}

}  // anonymous namespace

// A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this
// class is to avoid a premature frame copy. A media::VideoFrame is injected
// with SetFrame, and converted into a cricket::VideoFrame with
// CreateAliasedFrame. SetFrame should be called before CreateAliasedFrame
// for every frame.
class WebRtcVideoCapturerAdapter::MediaVideoFrameFactory
    : public cricket::VideoFrameFactory {
 public:
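  // Typical usage, as seen in WebRtcVideoCapturerAdapter::OnFrameCaptured():
  //   factory->SetFrame(frame, elapsed_time);
  //   SignalFrameCaptured(this, factory->GetCapturedFrame());
  //   factory->ReleaseFrame();
  // CreateAliasedFrame() is then expected to be called by libjingle while the
  // captured-frame signal is being handled, before ReleaseFrame() drops the
  // reference to the injected frame.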
  void SetFrame(const scoped_refptr<media::VideoFrame>& frame,
                int64_t elapsed_time) {
    // Create a CapturedFrame that only contains header information, not the
    // actual pixel data.
    captured_frame_.width = frame->natural_size().width();
    captured_frame_.height = frame->natural_size().height();
    captured_frame_.elapsed_time = elapsed_time;
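    // cricket::CapturedFrame expects |time_stamp| (like |elapsed_time|) in
    // nanoseconds, while media::VideoFrame reports microsecond-resolution
    // base::TimeDelta values, hence the conversion below.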
    captured_frame_.time_stamp = frame->timestamp().InMicroseconds() *
                                 base::Time::kNanosecondsPerMicrosecond;
    captured_frame_.pixel_height = 1;
    captured_frame_.pixel_width = 1;
    captured_frame_.rotation = webrtc::kVideoRotation_0;
    captured_frame_.data = NULL;
    captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
    captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY);

    frame_ = frame;
  }

  void ReleaseFrame() { frame_ = NULL; }

  const cricket::CapturedFrame* GetCapturedFrame() const {
    return &captured_frame_;
  }

  cricket::VideoFrame* CreateAliasedFrame(
      const cricket::CapturedFrame* input_frame,
      int cropped_input_width,
      int cropped_input_height,
      int output_width,
      int output_height) const override {
    // Check that captured_frame is actually our frame.
    DCHECK(input_frame == &captured_frame_);
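    // |input_frame| only carries header information; the pixel data still
    // lives in |frame_|, which was injected via SetFrame().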

    const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() *
                                 base::Time::kNanosecondsPerMicrosecond;

    // Return |frame_| directly if it is texture backed, because there is no
    // cropping support for texture yet. See http://crbug/503653.
    if (frame_->HasTextures()) {
      return new cricket::WebRtcVideoFrame(
          new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame_),
          captured_frame_.elapsed_time, timestamp_ns);
    }
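
    // |cropped_input_width| x |cropped_input_height| are expressed in the
    // coordinate space of |captured_frame_| (the frame's natural size), so
    // the crop is rescaled proportionally into the coordinate space of
    // |frame_->visible_rect()| before being centered.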
    // Create a centered cropped visible rect that preserves the aspect ratio
    // of the cropped natural size.
    gfx::Rect visible_rect = frame_->visible_rect();
    visible_rect.ClampToCenteredSize(gfx::Size(
        visible_rect.width() * cropped_input_width / input_frame->width,
        visible_rect.height() * cropped_input_height / input_frame->height));

    const gfx::Size output_size(output_width, output_height);
    scoped_refptr<media::VideoFrame> video_frame =
        media::VideoFrame::WrapVideoFrame(frame_, visible_rect, output_size);
    video_frame->AddDestructionObserver(
        base::Bind(&ReleaseOriginalFrame, frame_));
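    // |video_frame| wraps the pixel data of |frame_| rather than copying it;
    // binding |frame_| into the destruction observer keeps the original frame
    // alive for as long as the wrapping frame is in use.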

    // If no scaling is needed, return a wrapped version of |frame_| directly.
    if (video_frame->natural_size() == video_frame->visible_rect().size()) {
      return new cricket::WebRtcVideoFrame(
          new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame),
          captured_frame_.elapsed_time, timestamp_ns);
    }

    // We need to scale the frame before we hand it over to cricket.
    scoped_refptr<media::VideoFrame> scaled_frame =
        scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, output_size,
                                       gfx::Rect(output_size), output_size,
                                       frame_->timestamp());
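    // Scale each I420 plane of the cropped visible region into the pooled
    // destination frame.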
    libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane),
                      video_frame->stride(media::VideoFrame::kYPlane),
                      video_frame->visible_data(media::VideoFrame::kUPlane),
                      video_frame->stride(media::VideoFrame::kUPlane),
                      video_frame->visible_data(media::VideoFrame::kVPlane),
                      video_frame->stride(media::VideoFrame::kVPlane),
                      video_frame->visible_rect().width(),
                      video_frame->visible_rect().height(),
                      scaled_frame->data(media::VideoFrame::kYPlane),
                      scaled_frame->stride(media::VideoFrame::kYPlane),
                      scaled_frame->data(media::VideoFrame::kUPlane),
                      scaled_frame->stride(media::VideoFrame::kUPlane),
                      scaled_frame->data(media::VideoFrame::kVPlane),
                      scaled_frame->stride(media::VideoFrame::kVPlane),
                      output_width, output_height, libyuv::kFilterBilinear);
    return new cricket::WebRtcVideoFrame(
        new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame),
        captured_frame_.elapsed_time, timestamp_ns);
  }
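
  // Overload without explicit cropping: the full captured size is used as the
  // crop, so only scaling (if needed) is applied.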
  cricket::VideoFrame* CreateAliasedFrame(
      const cricket::CapturedFrame* input_frame,
      int output_width,
      int output_height) const override {
    return CreateAliasedFrame(input_frame, input_frame->width,
                              input_frame->height, output_width,
                              output_height);
  }

 private:
  scoped_refptr<media::VideoFrame> frame_;
  cricket::CapturedFrame captured_frame_;
  // This is used only if scaling is needed.
  mutable media::VideoFramePool scaled_frame_pool_;
};

WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast)
    : is_screencast_(is_screencast),
      running_(false),
      first_frame_timestamp_(media::kNoTimestamp()) {
  thread_checker_.DetachFromThread();
  // The base class takes ownership of the frame factory.
  set_frame_factory(new MediaVideoFrameFactory);
}

WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() {
  DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor";
}

cricket::CaptureState WebRtcVideoCapturerAdapter::Start(
    const cricket::VideoFormat& capture_format) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width
           << " h = " << capture_format.height;

  running_ = true;
  return cricket::CS_RUNNING;
}

void WebRtcVideoCapturerAdapter::Stop() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << " WebRtcVideoCapturerAdapter::Stop ";

  running_ = false;
  SetCaptureFormat(NULL);
  SignalStateChange(this, cricket::CS_STOPPED);
}

bool WebRtcVideoCapturerAdapter::IsRunning() {
  DCHECK(thread_checker_.CalledOnValidThread());
  return running_;
}

bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
    std::vector<uint32>* fourccs) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!fourccs || fourccs->empty());
  if (fourccs)
    fourccs->push_back(cricket::FOURCC_I420);
  return fourccs != NULL;
}

bool WebRtcVideoCapturerAdapter::IsScreencast() const {
  return is_screencast_;
}

bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
    const cricket::VideoFormat& desired,
    cricket::VideoFormat* best_format) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << " GetBestCaptureFormat:: "
           << " w = " << desired.width
           << " h = " << desired.height;

  // Capability enumeration is done in MediaStreamVideoSource. The adapter can
  // just use what is provided, so the desired format is the best format.
  best_format->width = desired.width;
  best_format->height = desired.height;
  best_format->fourcc = cricket::FOURCC_I420;
  best_format->interval = desired.interval;
  return true;
}
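
// Hands a captured media::VideoFrame over to libjingle: the frame is injected
// into the MediaVideoFrameFactory, announced via SignalFrameCaptured(), and
// released again as soon as the signal has been handled.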
void WebRtcVideoCapturerAdapter::OnFrameCaptured(
    const scoped_refptr<media::VideoFrame>& frame) {
  DCHECK(thread_checker_.CalledOnValidThread());
  TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured");
  if (!((frame->IsMappable() &&
         (frame->format() == media::PIXEL_FORMAT_I420 ||
          frame->format() == media::PIXEL_FORMAT_YV12)) ||
        frame->HasTextures())) {
    // Since connecting sources and sinks does not check the format, we need
    // to just ignore formats that we can not handle.
    return;
  }
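
  // Timestamps handed to libjingle are expressed in nanoseconds, relative to
  // the first captured frame.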
  if (first_frame_timestamp_ == media::kNoTimestamp())
    first_frame_timestamp_ = frame->timestamp();

  const int64 elapsed_time =
      (frame->timestamp() - first_frame_timestamp_).InMicroseconds() *
      base::Time::kNanosecondsPerMicrosecond;

  // Inject the frame via the VideoFrameFactory of the base class.
  MediaVideoFrameFactory* media_video_frame_factory =
      reinterpret_cast<MediaVideoFrameFactory*>(frame_factory());
  media_video_frame_factory->SetFrame(frame, elapsed_time);

  // This signals to libJingle that a new VideoFrame is available.
  SignalFrameCaptured(this, media_video_frame_factory->GetCapturedFrame());

  media_video_frame_factory->ReleaseFrame();  // Release the frame ASAP.
}

}  // namespace content