// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/test/test_video_renderer.h"

#include <math.h>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/synchronization/lock.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "remoting/codec/video_decoder.h"
#include "remoting/codec/video_decoder_verbatim.h"
#include "remoting/codec/video_decoder_vpx.h"
#include "remoting/proto/video.pb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"

namespace remoting {

namespace {

// Used to store an RGB color, which can be converted from a uint32_t.
struct RGBValue {
  RGBValue(int r, int g, int b) : red(r), green(g), blue(b) {}

  int red;
  int green;
  int blue;
};

// Converts a uint32_t color, packed as 0x00RRGGBB, to an RGBValue.
RGBValue ConvertUint32ToRGBValue(uint32_t color) {
  RGBValue rgb_value((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF);
  return rgb_value;
}

// Used to account for frame resizing and lossy encoding error, expressed as a
// fraction of the full color range.
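// Given the root-mean-square comparison in ExpectedAverageColorIsMatched()
// below, 0.02 corresponds to a per-channel error of roughly 5 levels out of
// 255.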
const double kMaxColorError = 0.02;

}  // namespace

// Implements video decoding functionality.
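// Core is constructed on the main thread but lives on the video decoding
// thread afterwards: |thread_checker_| is detached in the constructor and
// bound to the decoding thread by the first method that runs there. The one
// exception is GetCurrentFrameForTest(), which is called from the main thread
// and only reads |frame_| under |lock_|.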
class TestVideoRenderer::Core {
 public:
  Core();
  ~Core();

  // Initializes the internal structures of the class.
  void Initialize();

  // Used to decode video packets.
  void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
                          const base::Closure& done);

  // Initialize a decoder to decode video packets.
  void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);

  // Returns a copy of the current frame.
  scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;

  // Sets the expected image pattern for comparison. The callback will be run
  // when the pattern is matched.
  void ExpectAverageColorInRect(
      const webrtc::DesktopRect& expected_rect,
      uint32_t expected_avg_color,
      const base::Closure& image_pattern_matched_callback);

 private:
  // Returns the average color of pixels that fall within |rect| on the
  // current frame.
  RGBValue CalculateAverageColorValue(const webrtc::DesktopRect& rect) const;

  // Compares |candidate_avg_value| to |expected_avg_color_|.
  // Returns true if the root mean square of the errors in the R, G and B
  // components does not exceed a given limit.
  bool ExpectedAverageColorIsMatched(
      const RGBValue& candidate_avg_value) const;

  // Used to ensure Core methods are called on the same thread.
  base::ThreadChecker thread_checker_;

  // Used to decode video packets.
  scoped_ptr<VideoDecoder> decoder_;

  // Updated region of the current desktop frame compared to the previous one.
  webrtc::DesktopRegion updated_region_;

  // Screen size of the remote host.
  webrtc::DesktopSize screen_size_;

  // Used to post tasks back to the main thread.
  scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;

  // Used to store the decoded video frame.
  scoped_ptr<webrtc::DesktopFrame> frame_;

  // Protects access to |frame_|.
  mutable base::Lock lock_;

  // Used to store the expected image pattern.
  webrtc::DesktopRect expected_rect_;
  uint32_t expected_avg_color_;

  // Used to store the callback to run when the expected pattern is matched.
  base::Closure image_pattern_matched_callback_;

  DISALLOW_COPY_AND_ASSIGN(Core);
};

TestVideoRenderer::Core::Core()
    : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
  thread_checker_.DetachFromThread();
}

TestVideoRenderer::Core::~Core() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::Initialize() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (decoder_) {
    LOG(WARNING) << "Decoder is set more than once";
  }

  switch (codec) {
    case protocol::ChannelConfig::CODEC_VP8: {
      VLOG(1) << "Test Video Renderer will use VP8 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP8();
      break;
    }
    case protocol::ChannelConfig::CODEC_VP9: {
      VLOG(1) << "Test Video Renderer will use VP9 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP9();
      break;
    }
    case protocol::ChannelConfig::CODEC_VERBATIM: {
      VLOG(1) << "Test Video Renderer will use VERBATIM decoder";
      decoder_.reset(new VideoDecoderVerbatim());
      break;
    }
    default: {
      NOTREACHED() << "Unsupported codec: " << codec;
    }
  }
}

scoped_ptr<webrtc::DesktopFrame>
TestVideoRenderer::Core::GetCurrentFrameForTest() const {
  base::AutoLock auto_lock(lock_);
  return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
}

void TestVideoRenderer::Core::ProcessVideoPacket(
    scoped_ptr<VideoPacket> packet, const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called";

  // Screen size is attached on the first packet as well as when the
  // host screen is resized.
  if (packet->format().has_screen_width() &&
      packet->format().has_screen_height()) {
    webrtc::DesktopSize source_size(packet->format().screen_width(),
                                    packet->format().screen_height());
    if (!screen_size_.equals(source_size)) {
      screen_size_ = source_size;
      decoder_->Initialize(screen_size_);
      frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
    }
  }

  // To make life easier, assume that the desktop shape is a single rectangle.
  packet->clear_use_desktop_shape();
  if (!decoder_->DecodePacket(*packet.get())) {
    LOG(ERROR) << "Decoder::DecodePacket() failed.";
    return;
  }

  base::AutoLock auto_lock(lock_);

  // Render the decoded packet and write results to the buffer.
  // Note that |updated_region_| maintains the changed regions compared to the
  // previous video frame.
  decoder_->RenderFrame(screen_size_,
                        webrtc::DesktopRect::MakeWH(screen_size_.width(),
                                                    screen_size_.height()),
                        frame_->data(), frame_->stride(), &updated_region_);

  main_task_runner_->PostTask(FROM_HERE, done);

  // Check whether an image pattern matched callback has been set, and whether
  // |expected_rect_| falls within the current frame.
  if (image_pattern_matched_callback_.is_null() ||
      expected_rect_.right() > frame_->size().width() ||
      expected_rect_.bottom() > frame_->size().height()) {
    return;
  }

  // Compare the expected image pattern with the corresponding rectangle region
  // on the current frame.
  RGBValue accumulating_avg_value = CalculateAverageColorValue(expected_rect_);
  VLOG(2) << accumulating_avg_value.red << " " << accumulating_avg_value.green
          << " " << accumulating_avg_value.blue;

  if (ExpectedAverageColorIsMatched(accumulating_avg_value)) {
    main_task_runner_->PostTask(
        FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
  }
}

void TestVideoRenderer::Core::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    uint32_t expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());

  expected_rect_ = expected_rect;
  expected_avg_color_ = expected_avg_color;
  image_pattern_matched_callback_ = image_pattern_matched_callback;
}

RGBValue TestVideoRenderer::Core::CalculateAverageColorValue(
    const webrtc::DesktopRect& rect) const {
  int red_sum = 0;
  int green_sum = 0;
  int blue_sum = 0;

  // Loop through pixels that fall within |rect| to obtain the average color
  // value.
  for (int y = rect.top(); y < rect.bottom(); ++y) {
    uint8_t* frame_pos =
        frame_->data() + (y * frame_->stride() +
                          rect.left() * webrtc::DesktopFrame::kBytesPerPixel);

    // Pixels of the decoded video frame are presented in ARGB format.
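    // In memory that corresponds to byte order B, G, R, A, which is why red
    // is read from offset 2 and blue from offset 0 below.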
    for (int x = 0; x < rect.width(); ++x) {
      red_sum += frame_pos[2];
      green_sum += frame_pos[1];
      blue_sum += frame_pos[0];
      frame_pos += webrtc::DesktopFrame::kBytesPerPixel;
    }
  }

  int area = rect.width() * rect.height();
  RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
  return rgb_value;
}

bool TestVideoRenderer::Core::ExpectedAverageColorIsMatched(
    const RGBValue& candidate_avg_value) const {
  RGBValue expected_avg_value = ConvertUint32ToRGBValue(expected_avg_color_);
  double error_sum_squares = 0;
  double red_error = expected_avg_value.red - candidate_avg_value.red;
  double green_error = expected_avg_value.green - candidate_avg_value.green;
  double blue_error = expected_avg_value.blue - candidate_avg_value.blue;
  error_sum_squares = red_error * red_error + green_error * green_error +
                      blue_error * blue_error;
  error_sum_squares /= (255.0 * 255.0);
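
  // Equivalent to checking sqrt((dR^2 + dG^2 + dB^2) / 3) / 255 against
  // kMaxColorError, i.e. the root-mean-square error of the normalized
  // channel values.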
  return sqrt(error_sum_squares / 3) < kMaxColorError;
}

TestVideoRenderer::TestVideoRenderer()
    : video_decode_thread_(
          new base::Thread("TestVideoRendererVideoDecodingThread")),
      weak_factory_(this) {
  DCHECK(thread_checker_.CalledOnValidThread());

  core_.reset(new Core());
  if (!video_decode_thread_->Start()) {
    LOG(ERROR) << "Cannot start TestVideoRenderer";
  } else {
    video_decode_task_runner_ = video_decode_thread_->task_runner();
    video_decode_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&Core::Initialize, base::Unretained(core_.get())));
  }
}

TestVideoRenderer::~TestVideoRenderer() {
  DCHECK(thread_checker_.CalledOnValidThread());
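
  // |core_| must be destroyed on the decoding thread (its destructor checks
  // |thread_checker_|), after any decode tasks still queued ahead of it.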
  video_decode_task_runner_->DeleteSoon(FROM_HERE, core_.release());

  // The thread's message loop will run until it runs out of work.
  video_decode_thread_->Stop();
}

void TestVideoRenderer::OnSessionConfig(const protocol::SessionConfig& config) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::OnSessionConfig() Called";
  protocol::ChannelConfig::Codec codec = config.video_config().codec;
  SetCodecForDecoding(codec);
}

ChromotingStats* TestVideoRenderer::GetStats() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetStats() Called";

  // Stats are not tracked by the test video renderer.
  return nullptr;
}

protocol::VideoStub* TestVideoRenderer::GetVideoStub() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetVideoStub() Called";
  return this;
}

void TestVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> video_packet,
                                           const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(video_decode_task_runner_) << "Failed to start video decode thread";

  if (video_packet->has_data() && video_packet->data().size() != 0) {
    VLOG(2) << "TestVideoRenderer::ProcessVideoPacket() Called";

    // Post the video process task to the video decode thread.
    base::Closure process_video_task = base::Bind(
        &TestVideoRenderer::Core::ProcessVideoPacket,
        base::Unretained(core_.get()), base::Passed(&video_packet), done);
    video_decode_task_runner_->PostTask(FROM_HERE, process_video_task);
  } else {
    // Log at a high verbosity level as we receive empty packets frequently and
    // they can clutter up the debug output if the level is set too low.
    VLOG(3) << "Empty Video Packet received.";

    // Run the callback so that flow control is not blocked by empty packets.
    done.Run();
  }
}

void TestVideoRenderer::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::SetCodecForDecoding() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&Core::SetCodecForDecoding,
                            base::Unretained(core_.get()), codec));
}

scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
    const {
  DCHECK(thread_checker_.CalledOnValidThread());

  return core_->GetCurrentFrameForTest();
}

void TestVideoRenderer::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    uint32_t expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";

  DVLOG(2) << "TestVideoRenderer::ExpectAverageColorInRect() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Core::ExpectAverageColorInRect, base::Unretained(core_.get()),
                 expected_rect, expected_avg_color,
                 image_pattern_matched_callback));
}

}  // namespace remoting