// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/test/test_video_renderer.h"

#include <cmath>

#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/thread_task_runner_handle.h"
#include "base/timer/timer.h"
#include "media/base/video_frame.h"
#include "remoting/codec/video_encoder.h"
#include "remoting/codec/video_encoder_verbatim.h"
#include "remoting/codec/video_encoder_vpx.h"
#include "remoting/proto/video.pb.h"
#include "remoting/test/rgb_value.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"

namespace {

// Used to verify whether the image pattern is matched.
void ProcessPacketDoneHandler(const base::Closure& done_closure,
                              bool* handler_called) {
  *handler_called = true;
  done_closure.Run();
}

const int kDefaultScreenWidthPx = 1024;
const int kDefaultScreenHeightPx = 768;

// Default maximum allowed error for encoding and decoding, expressed as a
// fraction in [0, 1] (0.02 == 2%).
const double kDefaultErrorLimit = 0.02;

// Default expected rect for the image pattern, measured in pixels.
const webrtc::DesktopRect kDefaultExpectedRect =
    webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200);

}  // namespace

namespace remoting {
namespace test {

// Provides basic functionality for the TestVideoRenderer tests below.
// This fixture also creates a MessageLoop to test decoding video packets.
class TestVideoRendererTest : public testing::Test {
 public:
  TestVideoRendererTest();
  ~TestVideoRendererTest() override;

  // Handles creating a frame and sending it to the TestVideoRenderer for
  // processing.
  void TestVideoPacketProcessing(int screen_width,
                                 int screen_height,
                                 double error_limit);

  // Handles setting an image pattern and sending a frame to the
  // TestVideoRenderer. |expect_to_match| indicates whether the image pattern
  // is expected to match.
  void TestImagePatternMatch(int screen_width,
                             int screen_height,
                             const webrtc::DesktopRect& expected_rect,
                             bool expect_to_match);

  // Generates a basic desktop frame containing a gradient.
  scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient(
      int screen_width, int screen_height) const;

 protected:
  // Used to post tasks to the message loop.
  scoped_ptr<base::RunLoop> run_loop_;

  // Used to set timeouts and delays.
  scoped_ptr<base::Timer> timer_;

  // Manages the decoder and processes generated video packets.
  scoped_ptr<TestVideoRenderer> test_video_renderer_;

  // Used to encode desktop frames to generate video packets.
  scoped_ptr<VideoEncoder> encoder_;

 private:
  // testing::Test interface.
  void SetUp() override;

  // Sets the image pattern, sends a video packet, and returns whether the
  // expected pattern is matched.
  bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet,
                                 const webrtc::DesktopRect& expected_rect,
                                 const RGBValue& expected_average_color);

  // Returns the average color value of the pixels that fall within |rect|.
  // NOTE: Callers should not release the objects.
  RGBValue CalculateAverageColorValueForFrame(
      const webrtc::DesktopFrame* frame,
      const webrtc::DesktopRect& rect) const;

  // Returns the mean error of two frames over all pixels, where the error at
  // each pixel is the root mean square of the errors in the R, G and B
  // components, each normalized to [0, 1].
  double CalculateError(const webrtc::DesktopFrame* original_frame,
                        const webrtc::DesktopFrame* decoded_frame) const;

  // Fills a desktop frame with a gradient.
  void FillFrameWithGradient(webrtc::DesktopFrame* frame) const;

  // The thread's message loop. Valid only when the thread is alive.
  scoped_ptr<base::MessageLoop> message_loop_;

  DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest);
};

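// Note: base::Timer's two constructor arguments are |retain_user_task| and
// |is_repeating|, so the timer below keeps its task across runs and fires at
// most once per Start() call.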
TestVideoRendererTest::TestVideoRendererTest()
    : timer_(new base::Timer(true, false)) {}

TestVideoRendererTest::~TestVideoRendererTest() {}

void TestVideoRendererTest::SetUp() {
  if (!base::MessageLoop::current()) {
    // Create a temporary message loop if the current thread does not already
    // have one.
    message_loop_.reset(new base::MessageLoop);
  }
  test_video_renderer_.reset(new TestVideoRenderer());
}

void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width,
                                                      int screen_height,
                                                      double error_limit) {
  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  // Generate a frame containing a gradient.
  scoped_ptr<webrtc::DesktopFrame> original_frame =
      CreateDesktopFrameWithGradient(screen_width, screen_height);
  EXPECT_TRUE(original_frame);

  scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get());

  DCHECK(!run_loop_ || !run_loop_->running());
  DCHECK(!timer_->IsRunning());
  run_loop_.reset(new base::RunLoop());

  // Set an extremely long timeout (10 minutes) so that a bug cannot hang the
  // test indefinitely. NOTE: We've seen cases that take up to a minute to
  // process a packet, so such a generous timeout is chosen to avoid flakiness.
  timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
                run_loop_->QuitClosure());

  // Wait for the video packet to be processed and rendered to the buffer.
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           run_loop_->QuitClosure());

  run_loop_->Run();
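  // If the run loop exited because the packet finished processing rather than
  // because the 10 minute timeout fired, the timeout timer is still running.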
  EXPECT_TRUE(timer_->IsRunning());
  timer_->Stop();
  run_loop_.reset();

  scoped_ptr<webrtc::DesktopFrame> buffer_copy =
      test_video_renderer_->GetCurrentFrameForTest();
  EXPECT_NE(buffer_copy, nullptr);

  // The original frame is compared to the decoded video frame to check that
  // the mean error over all pixels does not exceed the given limit.
  double error = CalculateError(original_frame.get(), buffer_copy.get());
  EXPECT_LT(error, error_limit);
}

bool TestVideoRendererTest::SendPacketAndWaitForMatch(
    scoped_ptr<VideoPacket> packet,
    const webrtc::DesktopRect& expected_rect,
    const RGBValue& expected_average_color) {
  DCHECK(!run_loop_ || !run_loop_->running());
  DCHECK(!timer_->IsRunning());
  run_loop_.reset(new base::RunLoop());

  // Set an extremely long timeout (10 minutes) so that a bug cannot hang the
  // test indefinitely. NOTE: We've seen cases that take up to a minute to
  // process a packet, so such a generous timeout is chosen to avoid flakiness.
  timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10),
                run_loop_->QuitClosure());

  // Set the expected image pattern.
  test_video_renderer_->ExpectAverageColorInRect(
      expected_rect, expected_average_color, run_loop_->QuitClosure());

  // |packet_copy| is used to verify whether the expected image pattern is
  // matched by |packet|.
  scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get()));

  // Post the first test packet: |packet|.
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           base::Bind(&base::DoNothing));

  // The second packet, |packet_copy|, is posted next. Its
  // |second_packet_done_callback| is always posted back to the main thread;
  // however, whether it runs before the run loop quits depends on whether the
  // expected pattern is matched.
  bool second_packet_done_is_called = false;
  base::Closure second_packet_done_callback =
      base::Bind(&ProcessPacketDoneHandler, run_loop_->QuitClosure(),
                 &second_packet_done_is_called);

  test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(),
                                           second_packet_done_callback);

  run_loop_->Run();
  EXPECT_TRUE(timer_->IsRunning());
  timer_->Stop();
  run_loop_.reset();

  // If the expected image pattern is matched, the QuitClosure of |run_loop_|
  // is called before |second_packet_done_callback|, which leaves
  // |second_packet_done_is_called| false.
  bool image_pattern_is_matched = !second_packet_done_is_called;

  return image_pattern_is_matched;
}

void TestVideoRendererTest::TestImagePatternMatch(
    int screen_width,
    int screen_height,
    const webrtc::DesktopRect& expected_rect,
    bool expect_to_match) {
  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  scoped_ptr<webrtc::DesktopFrame> frame =
      CreateDesktopFrameWithGradient(screen_width, screen_height);
  RGBValue expected_average_color =
      CalculateAverageColorValueForFrame(frame.get(), expected_rect);
  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());

  if (expect_to_match) {
    EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
                                          expected_average_color));
  } else {
    // Shift each channel by 128 (modulo 255),
    // e.g. (10, 127, 200) -> (138, 0, 73).
    // In this way, the error between the expected color and the true value is
    // always around 0.5.
    int red_shift = (expected_average_color.red + 128) % 255;
    int green_shift = (expected_average_color.green + 128) % 255;
    int blue_shift = (expected_average_color.blue + 128) % 255;

    RGBValue expected_average_color_shift =
        RGBValue(red_shift, green_shift, blue_shift);

    EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect,
                                           expected_average_color_shift));
  }
}

RGBValue TestVideoRendererTest::CalculateAverageColorValueForFrame(
    const webrtc::DesktopFrame* frame,
    const webrtc::DesktopRect& rect) const {
  int red_sum = 0;
  int green_sum = 0;
  int blue_sum = 0;

  // Loop through the pixels that fall within |rect| to obtain the average
  // color value.
  for (int y = rect.top(); y < rect.bottom(); ++y) {
    uint8_t* frame_pos =
        frame->data() + (y * frame->stride() +
                         rect.left() * webrtc::DesktopFrame::kBytesPerPixel);

    // Pixels of the decoded video frame are presented in ARGB format.
    for (int x = 0; x < rect.width(); ++x) {
      red_sum += frame_pos[2];
      green_sum += frame_pos[1];
      blue_sum += frame_pos[0];
      frame_pos += 4;
    }
  }

  int area = rect.width() * rect.height();

  return RGBValue(red_sum / area, green_sum / area, blue_sum / area);
}

double TestVideoRendererTest::CalculateError(
    const webrtc::DesktopFrame* original_frame,
    const webrtc::DesktopFrame* decoded_frame) const {
  DCHECK(original_frame);
  DCHECK(decoded_frame);

  // Check that the size remains the same after encoding and decoding.
  EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width());
  EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height());
  EXPECT_EQ(original_frame->stride(), decoded_frame->stride());
  int screen_width = original_frame->size().width();
  int screen_height = original_frame->size().height();

  // The error is accumulated as the sum of the squared error at each pixel in
  // the R, G and B components, each normalized to [0, 1].
  double error_sum_squares = 0.0;

  // The mapping between the position of a pixel in the two-dimensional image
  // (origin at the top left corner) and its index in the one-dimensional
  // buffer:
  //
  //  _______________
  // |      |        |      stride = 4 * width;
  // |      |        |
  // |      | height |      height * stride + 4 * width + 0: Blue channel.
  // |      |        |  =>  height * stride + 4 * width + 1: Green channel.
  // |-------        |      height * stride + 4 * width + 2: Red channel.
  // | width         |
  // |_______________|

  for (int height = 0; height < screen_height; ++height) {
    uint8_t* original_ptr =
        original_frame->data() + height * original_frame->stride();
    uint8_t* decoded_ptr =
        decoded_frame->data() + height * decoded_frame->stride();

    for (int width = 0; width < screen_width; ++width) {
      // Errors are calculated in the R, G, B components.
      for (int j = 0; j < 3; ++j) {
        int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j;
        double original_value = static_cast<double>(*(original_ptr + offset));
        double decoded_value = static_cast<double>(*(decoded_ptr + offset));
        double error = original_value - decoded_value;

        // Normalize the error to [0, 1].
        error /= 255.0;
        error_sum_squares += error * error;
      }
    }
  }

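  // The result is the root-mean-square error over all pixels and the three
  // color channels: identical frames yield 0.0, and frames that differ by the
  // full 255 in every channel yield 1.0.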
  return sqrt(error_sum_squares / (3 * screen_width * screen_height));
}

scoped_ptr<webrtc::DesktopFrame>
TestVideoRendererTest::CreateDesktopFrameWithGradient(
    int screen_width, int screen_height) const {
  webrtc::DesktopSize screen_size(screen_width, screen_height);
  scoped_ptr<webrtc::DesktopFrame> frame(
      new webrtc::BasicDesktopFrame(screen_size));
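  // Mark the entire frame as the updated region; the encoders consult this
  // region when deciding which pixels to encode.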
  frame->mutable_updated_region()->SetRect(
      webrtc::DesktopRect::MakeSize(screen_size));
  FillFrameWithGradient(frame.get());
  return frame.Pass();
}

void TestVideoRendererTest::FillFrameWithGradient(
    webrtc::DesktopFrame* frame) const {
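  // Each pixel is 4 bytes, stored as B, G, R, A in memory (ARGB format); the
  // fourth byte is written as 0 since these tests ignore the alpha channel.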
  for (int y = 0; y < frame->size().height(); ++y) {
    uint8_t* p = frame->data() + y * frame->stride();
    for (int x = 0; x < frame->size().width(); ++x) {
      *p++ = (255.0 * x) / frame->size().width();
      *p++ = (164.0 * y) / frame->size().height();
      *p++ = (82.0 * (x + y)) /
             (frame->size().width() + frame->size().height());
      *p++ = 0;
    }
  }
}

// Verify video decoding for the VP8 codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP8);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify video decoding for the VP9 codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP9);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify video decoding for the VERBATIM codec.
TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VERBATIM);
  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);
}

// Verify that a set of video packets is processed correctly.
TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::CODEC_VP8);

  // Post multiple tasks to |test_video_renderer_|, and it should not crash.
  // 20 is chosen because it is large enough to ensure that more than one task
  // is queued on the video decode thread, yet small enough that the unit test
  // still completes quickly.
  const int task_num = 20;
  ScopedVector<VideoPacket> video_packets;
  for (int i = 0; i < task_num; ++i) {
    scoped_ptr<webrtc::DesktopFrame> original_frame =
        CreateDesktopFrameWithGradient(kDefaultScreenWidthPx,
                                       kDefaultScreenHeightPx);
    video_packets.push_back(encoder_->Encode(*original_frame.get()));
  }

  for (int i = 0; i < task_num; ++i) {
    // Transfer ownership of the video packet.
    VideoPacket* packet = video_packets[i];
    video_packets[i] = nullptr;
    test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet),
                                             base::Bind(&base::DoNothing));
  }
}

// Verify that a change in video packet size is handled properly.
TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);

  TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                            kDefaultErrorLimit);

  TestVideoPacketProcessing(2 * kDefaultScreenWidthPx,
                            2 * kDefaultScreenHeightPx, kDefaultErrorLimit);

  TestVideoPacketProcessing(kDefaultScreenWidthPx / 2,
                            kDefaultScreenHeightPx / 2, kDefaultErrorLimit);
}

// Verify that setting an expected image pattern doesn't break video packet
// processing.
TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);

  DCHECK(encoder_);
  DCHECK(test_video_renderer_);

  scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient(
      kDefaultScreenWidthPx, kDefaultScreenHeightPx);

  // Since this test doesn't care whether the expected image pattern is
  // matched, the expected color is chosen arbitrarily.
  RGBValue black_color = RGBValue();

  // Set the expected image pattern.
  test_video_renderer_->ExpectAverageColorInRect(
      kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing));

  // Post a test video packet.
  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get());
  test_video_renderer_->ProcessVideoPacket(packet.Pass(),
                                           base::Bind(&base::DoNothing));
}

// Verify that a correct image pattern can be matched for VP8.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify that the expected image pattern can be matched for VP9.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP9);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify that the expected image pattern can be matched for VERBATIM.
TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VERBATIM);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, true);
}

// Verify that an incorrect image pattern is not matched for VP8.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) {
  encoder_ = VideoEncoderVpx::CreateForVP8();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP8);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

// Verify that an incorrect image pattern is not matched for VP9.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) {
  encoder_ = VideoEncoderVpx::CreateForVP9();
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VP9);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

// Verify that an incorrect image pattern is not matched for VERBATIM.
TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) {
  encoder_.reset(new VideoEncoderVerbatim());
  test_video_renderer_->SetCodecForDecoding(
      protocol::ChannelConfig::Codec::CODEC_VERBATIM);
  TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx,
                        kDefaultExpectedRect, false);
}

}  // namespace test
}  // namespace remoting