remoting/test/test_video_renderer_unittest.cc

// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/test/test_video_renderer.h"

#include <cmath>

#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/thread_task_runner_handle.h"
#include "base/timer/timer.h"
#include "remoting/codec/video_encoder.h"
#include "remoting/codec/video_encoder_verbatim.h"
#include "remoting/codec/video_encoder_vpx.h"
#include "remoting/proto/video.pb.h"
#include "remoting/test/rgb_value.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"

namespace {

// Used by the image pattern match tests: records via |handler_called| that
// this done callback ran, then runs |done_closure|.
void ProcessPacketDoneHandler(const base::Closure& done_closure,
                              bool* handler_called) {
  *handler_called = true;
  done_closure.Run();
}

const int kDefaultScreenWidthPx = 1024;
const int kDefaultScreenHeightPx = 768;

// Default max error for encoding and decoding, expressed as a fraction of the
// full channel range (0.02 == 2%).
const double kDefaultErrorLimit = 0.02;

// Default expected rect for image pattern, measured in pixels.
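// MakeLTRB(left, top, right, bottom) below yields a 100x100 square whose
// top-left corner is at (100, 100).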
const webrtc::DesktopRect kDefaultExpectedRect =
    webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200);

}  // namespace

namespace remoting {
namespace test {

// Provides basic functionality for the TestVideoRenderer tests below.
// This fixture also creates a MessageLoop to test decoding video packets.
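// Tests typically assign |encoder_|, select the matching codec on
// |test_video_renderer_| via SetCodecForDecoding(), and then call one of the
// helper methods below.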
class TestVideoRendererTest : public testing::Test {
 public:
  TestVideoRendererTest();
  ~TestVideoRendererTest() override;

  // Creates a frame and sends it to the TestVideoRenderer for processing.
  void TestVideoPacketProcessing(int screen_width, int screen_height,
                                 double error_limit);

  // Sets an image pattern and sends a frame to the TestVideoRenderer.
  // |expect_to_match| indicates whether the image pattern is expected to
  // match.
  void TestImagePatternMatch(int screen_width,
                             int screen_height,
                             const webrtc::DesktopRect& expected_rect,
                             bool expect_to_match);

  // Generates a basic desktop frame containing a gradient.
  scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient(
      int screen_width, int screen_height) const;

 protected:
  // Used to post tasks to the message loop.
  scoped_ptr<base::RunLoop> run_loop_;

  // Used to set timeouts and delays.
  scoped_ptr<base::Timer> timer_;

  // Manages the decoder and processes generated video packets.
  scoped_ptr<TestVideoRenderer> test_video_renderer_;

  // Used to encode desktop frames to generate video packets.
  scoped_ptr<VideoEncoder> encoder_;

 private:
  // testing::Test interface.
  void SetUp() override;

  // Sets the expected image pattern, sends the video packet and returns
  // whether the expected pattern was matched.
  bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet,
                                 const webrtc::DesktopRect& expected_rect,
                                 const RGBValue& expected_average_color);

  // Returns the average color value of the pixels that fall within |rect|.
  // NOTE: This function does not take ownership of |frame|.
  RGBValue CalculateAverageColorValueForFrame(
      const webrtc::DesktopFrame* frame,
      const webrtc::DesktopRect& rect) const;

  // Returns the root-mean-square error between the two frames over all pixels,
  // where the error in each of the R, G and B components is normalized to
  // [0, 1].
  double CalculateError(const webrtc::DesktopFrame* original_frame,
                        const webrtc::DesktopFrame* decoded_frame) const;

  // Fills a desktop frame with a gradient.
  void FillFrameWithGradient(webrtc::DesktopFrame* frame) const;

  // The thread's message loop. Valid only when the thread is alive.
  scoped_ptr<base::MessageLoop> message_loop_;

  DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest);
};

TestVideoRendererTest::TestVideoRendererTest()
    : timer_(new base::Timer(true, false)) {}

TestVideoRendererTest::~TestVideoRendererTest() {}

void TestVideoRendererTest::SetUp() {
  if (!base::MessageLoop::current()) {
    // Create a temporary message loop if the current thread does not already
    // have one.
    message_loop_.reset(new base::MessageLoop);
  }
  test_video_renderer_.reset(new TestVideoRenderer());
}

void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width,
                                                      int screen_height,
                                                      double error_limit) {
  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  // Generate a frame containing a gradient.
  scoped_ptr<webrtc::DesktopFrame> original_frame =
      CreateDesktopFrameWithGradient(screen_width, screen_height);
  EXPECT_TRUE(original_frame);

  scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get());

  DCHECK(!run_loop_ || !run_loop_->running());
  DCHECK(!timer_->IsRunning());
  run_loop_.reset(new base::RunLoop());

  // Set an extremely long timeout (10 minutes) so that a bug cannot hang the
  // system indefinitely. NOTE: Some packets have taken up to a minute to
  // process, so a generous timeout is chosen to avoid flakiness.
  timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
                run_loop_->QuitClosure());

  // Wait for the video packet to be processed and rendered to the buffer.
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           run_loop_->QuitClosure());

  run_loop_->Run();
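  // The timer is one-shot, so it is no longer running once it has fired; this
  // check therefore verifies that the run loop exited because the packet was
  // processed rather than because the timeout was hit.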
  EXPECT_TRUE(timer_->IsRunning());
  timer_->Stop();
  run_loop_.reset();

  scoped_ptr<webrtc::DesktopFrame> buffer_copy =
      test_video_renderer_->GetCurrentFrameForTest();
  EXPECT_NE(buffer_copy, nullptr);

  // The original frame is compared to the decoded video frame to check that
  // the mean error over all pixels does not exceed the given limit.
  double error = CalculateError(original_frame.get(), buffer_copy.get());
  EXPECT_LT(error, error_limit);
}

bool TestVideoRendererTest::SendPacketAndWaitForMatch(
    scoped_ptr<VideoPacket> packet,
    const webrtc::DesktopRect& expected_rect,
    const RGBValue& expected_average_color) {
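  // Two copies of the same packet are processed. If the expected pattern is
  // matched while the first copy is decoded, |run_loop_| quits before the
  // second copy's done callback can run, which is what the return value below
  // reports.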
  DCHECK(!run_loop_ || !run_loop_->running());
  DCHECK(!timer_->IsRunning());
  run_loop_.reset(new base::RunLoop());

  // Set an extremely long timeout (10 minutes) so that a bug cannot hang the
  // system indefinitely. NOTE: Some packets have taken up to a minute to
  // process, so a generous timeout is chosen to avoid flakiness.
  timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
                run_loop_->QuitClosure());

  // Set the expected image pattern.
  test_video_renderer_->ExpectAverageColorInRect(
      expected_rect, expected_average_color, run_loop_->QuitClosure());

  // Used to verify whether the expected image pattern is matched by |packet|.
  scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get()));

  // Post the first test packet: |packet|.
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           base::Bind(&base::DoNothing));

  // The second packet, |packet_copy|, is then posted. Its done callback,
  // |second_packet_done_callback|, is always posted back to the main thread,
  // but whether it actually runs depends on whether the expected pattern was
  // matched.
  bool second_packet_done_is_called = false;
  base::Closure second_packet_done_callback =
      base::Bind(&ProcessPacketDoneHandler, run_loop_->QuitClosure(),
                 &second_packet_done_is_called);

  test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(),
                                           second_packet_done_callback);

  run_loop_->Run();
  EXPECT_TRUE(timer_->IsRunning());
  timer_->Stop();
  run_loop_.reset();

  // If the expected image pattern was matched, the QuitClosure of |run_loop_|
  // ran before |second_packet_done_callback|, which leaves
  // |second_packet_done_is_called| false.
  bool image_pattern_is_matched = !second_packet_done_is_called;

  return image_pattern_is_matched;
}

void TestVideoRendererTest::TestImagePatternMatch(
    int screen_width,
    int screen_height,
    const webrtc::DesktopRect& expected_rect,
    bool expect_to_match) {
  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  scoped_ptr<webrtc::DesktopFrame> frame =
      CreateDesktopFrameWithGradient(screen_width, screen_height);
  RGBValue expected_average_color =
      CalculateAverageColorValueForFrame(frame.get(), expected_rect);
  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());

  if (expect_to_match) {
    EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
                                          expected_average_color));
  } else {
    // Shift each channel by 128, e.g. (10, 127, 200) -> (138, 255, 73).
    // This way the error between the expected color and the true value is
    // always around 0.5.
    int red_shift = (expected_average_color.red + 128) % 255;
    int green_shift = (expected_average_color.green + 128) % 255;
    int blue_shift = (expected_average_color.blue + 128) % 255;

    RGBValue expected_average_color_shift =
        RGBValue(red_shift, green_shift, blue_shift);

    EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
                                           expected_average_color_shift));
  }
}

RGBValue TestVideoRendererTest::CalculateAverageColorValueForFrame(
    const webrtc::DesktopFrame* frame,
    const webrtc::DesktopRect& rect) const {
  int red_sum = 0;
  int green_sum = 0;
  int blue_sum = 0;

  // Loop through the pixels that fall within |rect| to obtain the average
  // color value.
  for (int y = rect.top(); y < rect.bottom(); ++y) {
    uint8_t* frame_pos =
        frame->data() + (y * frame->stride() +
                         rect.left() * webrtc::DesktopFrame::kBytesPerPixel);

    // Pixels of the decoded video frame are 32-bit ARGB values; in memory the
    // bytes are ordered B, G, R, A, so index 0 is blue and index 2 is red.
    for (int x = 0; x < rect.width(); ++x) {
      red_sum += frame_pos[2];
      green_sum += frame_pos[1];
      blue_sum += frame_pos[0];
      frame_pos += 4;
    }
  }

  int area = rect.width() * rect.height();

  return RGBValue(red_sum / area, green_sum / area, blue_sum / area);
}

double TestVideoRendererTest::CalculateError(
    const webrtc::DesktopFrame* original_frame,
    const webrtc::DesktopFrame* decoded_frame) const {
  DCHECK(original_frame);
  DCHECK(decoded_frame);

  // Check that the size remains the same after encoding and decoding.
  EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width());
  EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height());
  EXPECT_EQ(original_frame->stride(), decoded_frame->stride());
  int screen_width = original_frame->size().width();
  int screen_height = original_frame->size().height();

  // The error accumulated below is the sum of the squared errors in the R, G
  // and B components at each pixel, each normalized to [0, 1].
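  // With W = screen_width, H = screen_height and P(c, x, y) the value of
  // channel c at pixel (x, y), the value returned at the end is
  //   sqrt( sum over c, x, y of ((P_original - P_decoded) / 255)^2
  //         / (3 * W * H) ).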
  double error_sum_squares = 0.0;

  // The mapping between the position of a pixel in the 2-dimensional image
  // (origin at the top left corner) and its position in the 1-dimensional
  // buffer:
  //
  //  _______________
  // |      |        |      stride = 4 * width;
  // |      |        |
  // |      | height |      height * stride + 4 * width + 0; Blue channel.
  // |      |        |  =>  height * stride + 4 * width + 1; Green channel.
  // |-------        |      height * stride + 4 * width + 2; Red channel.
  // | width         |
  // |_______________|
  //
  for (int height = 0; height < screen_height; ++height) {
    uint8_t* original_ptr = original_frame->data() +
                            height * original_frame->stride();
    uint8_t* decoded_ptr = decoded_frame->data() +
                           height * decoded_frame->stride();

    for (int width = 0; width < screen_width; ++width) {
      // Errors are calculated in the R, G, B components.
      for (int j = 0; j < 3; ++j) {
        int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j;
        double original_value = static_cast<double>(*(original_ptr + offset));
        double decoded_value = static_cast<double>(*(decoded_ptr + offset));
        double error = original_value - decoded_value;

        // Normalize the error to [0, 1].
        error /= 255.0;
        error_sum_squares += error * error;
      }
    }
  }

  return sqrt(error_sum_squares / (3 * screen_width * screen_height));
}

scoped_ptr<webrtc::DesktopFrame>
TestVideoRendererTest::CreateDesktopFrameWithGradient(
    int screen_width, int screen_height) const {
  webrtc::DesktopSize screen_size(screen_width, screen_height);
  scoped_ptr<webrtc::DesktopFrame> frame(
      new webrtc::BasicDesktopFrame(screen_size));
  frame->mutable_updated_region()->SetRect(
      webrtc::DesktopRect::MakeSize(screen_size));
  FillFrameWithGradient(frame.get());
  return frame.Pass();
}

void TestVideoRendererTest::FillFrameWithGradient(
    webrtc::DesktopFrame* frame) const {
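  // The first three bytes of each 32-bit pixel ramp linearly with x, y and
  // x + y respectively; the fourth byte is left at 0.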
  for (int y = 0; y < frame->size().height(); ++y) {
    uint8_t* p = frame->data() + y * frame->stride();
    for (int x = 0; x < frame->size().width(); ++x) {
      *p++ = (255.0 * x) / frame->size().width();
      *p++ = (164.0 * y) / frame->size().height();
      *p++ = (82.0 * (x + y)) /
             (frame->size().width() + frame->size().height());
      *p++ = 0;
    }
  }
}

// Verify video decoding for VP8 Codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP8);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify video decoding for VP9 Codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP9);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify video decoding for VERBATIM Codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VERBATIM);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify that a set of video packets is processed correctly.
TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP8);

  // Post multiple tasks to |test_video_renderer_|, and it should not crash.
  // 20 is chosen because it is large enough to ensure more than one task is
  // queued on the video decode thread, but not so large that the unit test
  // takes too long to complete.
  const int task_num = 20;
  ScopedVector<VideoPacket> video_packets;
  for (int i = 0; i < task_num; ++i) {
    scoped_ptr<webrtc::DesktopFrame> original_frame =
        CreateDesktopFrameWithGradient(kDefaultScreenWidthPx,
                                       kDefaultScreenHeightPx);
    video_packets.push_back(encoder_->Encode(*original_frame.get()));
  }

  for (int i = 0; i < task_num; ++i) {
    // Transfer ownership of the video packet to the renderer and clear the
    // slot so the ScopedVector does not delete it as well.
    VideoPacket* packet = video_packets[i];
    video_packets[i] = nullptr;
    test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet),
                                             base::Bind(&base::DoNothing));
  }
}

// Verify video packet size change is handled properly.
TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);

  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);

  TestVideoPacketProcessing(2 * kDefaultScreenWidthPx,
                            2 * kDefaultScreenHeightPx, kDefaultErrorLimit);

  TestVideoPacketProcessing(kDefaultScreenWidthPx / 2,
                            kDefaultScreenHeightPx / 2, kDefaultErrorLimit);
}

// Verify setting expected image pattern doesn't break video packet processing.
TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);

  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient(
      kDefaultScreenWidthPx, kDefaultScreenHeightPx);

  // Since this test doesn't care whether the expected image pattern is
  // matched, the expected color is chosen arbitrarily.
  RGBValue black_color = RGBValue();

  // Set the expected image pattern.
  test_video_renderer_->ExpectAverageColorInRect(
      kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing));

  // Post the test video packet.
  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           base::Bind(&base::DoNothing));
}

// Verify correct image pattern can be matched for VP8.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify expected image pattern can be matched for VP9.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP9);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify expected image pattern can be matched for VERBATIM.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VERBATIM);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify incorrect image pattern shouldn't be matched for VP8.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

// Verify incorrect image pattern shouldn't be matched for VP9.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP9);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

// Verify incorrect image pattern shouldn't be matched for VERBATIM.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VERBATIM);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

}  // namespace test
}  // namespace remoting