// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/test/test_video_renderer.h"

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/synchronization/lock.h"
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "remoting/codec/video_decoder.h"
#include "remoting/codec/video_decoder_verbatim.h"
#include "remoting/codec/video_decoder_vpx.h"
#include "remoting/proto/video.pb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"

namespace {

// Used to store an RGB color; it can be converted from a uint32_t.
struct RGBValue {
  RGBValue(int r, int g, int b) : red(r), green(g), blue(b) {}

  int red;
  int green;
  int blue;
};

// Converts a uint32_t to an RGBValue.
RGBValue ConvertUint32ToRGBValue(uint32_t color) {
  RGBValue rgb_value((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF);
  return rgb_value;
}
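// For example, ConvertUint32ToRGBValue(0x00FFFF00) yields RGBValue(255, 255, 0)
// (yellow); the top (alpha) byte is ignored.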

// Used to account for frame resizing and lossy encoding error in percentage.
// The average color usually only varies by 1 on each channel, so 0.01 is large
// enough to allow variations while not being flaky for false negative cases.
const double kMaxColorError = 0.01;
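// Since the per-channel error is normalized to [0, 1], 0.01 corresponds to an
// average deviation of roughly 2.5 levels out of 255 on each channel.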

}  // namespace

namespace remoting {
namespace test {

// Implements video decoding functionality.
class TestVideoRenderer::Core {
 public:
  Core();
  ~Core();

  // Initializes the internal structures of the class.
  void Initialize();

  // Used to decode video packets.
  void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
                          const base::Closure& done);

  // Initializes a decoder to decode video packets.
  void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);

  // Returns a copy of the current frame.
  scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;

  // Sets the expected image pattern for comparison; the callback will be
  // called when the pattern is matched.
  void ExpectAverageColorInRect(
      const webrtc::DesktopRect& expected_rect,
      uint32_t expected_avg_color,
      const base::Closure& image_pattern_matched_callback);

 private:
  // Returns the average color of the pixels that fall within |rect| on the
  // current frame.
  RGBValue CalculateAverageColorValue(const webrtc::DesktopRect& rect) const;

  // Compares |candidate_avg_value| to |expected_avg_color_|.
  // Returns true if the root mean square of the errors in the R, G and B
  // components does not exceed a given limit.
  bool ExpectedAverageColorIsMatched(const RGBValue& candidate_avg_value) const;

  // Used to ensure Core methods are called on the same thread.
  base::ThreadChecker thread_checker_;

  // Used to decode video packets.
  scoped_ptr<VideoDecoder> decoder_;

  // Updated region of the current desktop frame compared to the previous one.
  webrtc::DesktopRegion updated_region_;

  // Screen size of the remote host.
  webrtc::DesktopSize screen_size_;

  // Used to post tasks back to the main thread.
  scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;

  // Used to store the decoded video frame.
  scoped_ptr<webrtc::DesktopFrame> frame_;

  // Protects access to |frame_|.
  mutable base::Lock lock_;

  // Used to store the expected image pattern.
  webrtc::DesktopRect expected_rect_;
  uint32_t expected_avg_color_;

  // Used to store the callback to run when the expected pattern is matched.
  base::Closure image_pattern_matched_callback_;

  DISALLOW_COPY_AND_ASSIGN(Core);
};
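
// Threading note: a Core instance is constructed on the main thread (and
// detaches its ThreadChecker there) but is used exclusively on the video
// decode thread afterwards; results are posted back via |main_task_runner_|.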

TestVideoRenderer::Core::Core()
    : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
  thread_checker_.DetachFromThread();
}

TestVideoRenderer::Core::~Core() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::Initialize() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (decoder_) {
    LOG(WARNING) << "Decoder is set more than once";
  }

  switch (codec) {
    case protocol::ChannelConfig::CODEC_VP8: {
      VLOG(1) << "Test Video Renderer will use VP8 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP8();
      break;
    }
    case protocol::ChannelConfig::CODEC_VP9: {
      VLOG(1) << "Test Video Renderer will use VP9 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP9();
      break;
    }
    case protocol::ChannelConfig::CODEC_VERBATIM: {
      VLOG(1) << "Test Video Renderer will use VERBATIM decoder";
      decoder_.reset(new VideoDecoderVerbatim());
      break;
    }
    default: {
      NOTREACHED() << "Unsupported codec: " << codec;
    }
  }
}

scoped_ptr<webrtc::DesktopFrame>
TestVideoRenderer::Core::GetCurrentFrameForTest() const {
  base::AutoLock auto_lock(lock_);
  DCHECK(frame_);
  return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
}

void TestVideoRenderer::Core::ProcessVideoPacket(
    scoped_ptr<VideoPacket> packet, const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(decoder_);
  DCHECK(packet);

  VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called";

  // Screen size is attached on the first packet as well as when the
  // host screen is resized.
  if (packet->format().has_screen_width() &&
      packet->format().has_screen_height()) {
    webrtc::DesktopSize source_size(packet->format().screen_width(),
                                    packet->format().screen_height());
    if (!screen_size_.equals(source_size)) {
      screen_size_ = source_size;
      decoder_->Initialize(screen_size_);
      frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
    }
  }

  // To make life easier, assume that the desktop shape is a single rectangle.
  packet->clear_use_desktop_shape();
  if (!decoder_->DecodePacket(*packet.get())) {
    LOG(ERROR) << "Decoder::DecodePacket() failed.";
    return;
  }

  {
    base::AutoLock auto_lock(lock_);

    // Render the decoded packet and write the results to the buffer. Note that
    // |updated_region_| maintains the changed regions compared to the previous
    // video frame.
    decoder_->RenderFrame(screen_size_,
                          webrtc::DesktopRect::MakeWH(screen_size_.width(),
                                                      screen_size_.height()),
                          frame_->data(), frame_->stride(), &updated_region_);
  }

  main_task_runner_->PostTask(FROM_HERE, done);

  // Check whether an image pattern matched callback has been set, and whether
  // |expected_rect_| falls within the current frame.
  if (image_pattern_matched_callback_.is_null() ||
      expected_rect_.right() > frame_->size().width() ||
      expected_rect_.bottom() > frame_->size().height()) {
    return;
  }

  // Compare the expected image pattern with the corresponding rectangle region
  // on the current frame.
  RGBValue accumulating_avg_value = CalculateAverageColorValue(expected_rect_);
  VLOG(2) << accumulating_avg_value.red << " " << accumulating_avg_value.green
          << " " << accumulating_avg_value.blue;

  if (ExpectedAverageColorIsMatched(accumulating_avg_value)) {
    main_task_runner_->PostTask(
        FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
  }
}

void TestVideoRenderer::Core::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    uint32_t expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());

  expected_rect_ = expected_rect;
  expected_avg_color_ = expected_avg_color;
  image_pattern_matched_callback_ = image_pattern_matched_callback;
}

RGBValue TestVideoRenderer::Core::CalculateAverageColorValue(
    const webrtc::DesktopRect& rect) const {
  int red_sum = 0;
  int green_sum = 0;
  int blue_sum = 0;

  // Loop through the pixels that fall within |rect| to obtain the average
  // color value.
  for (int y = rect.top(); y < rect.bottom(); ++y) {
    uint8_t* frame_pos =
        frame_->data() + (y * frame_->stride() +
                          rect.left() * webrtc::DesktopFrame::kBytesPerPixel);

    // Pixels of the decoded video frame are presented in ARGB format, so the
    // blue, green and red samples live at byte offsets 0, 1 and 2.
    for (int x = 0; x < rect.width(); ++x) {
      red_sum += frame_pos[2];
      green_sum += frame_pos[1];
      blue_sum += frame_pos[0];
      frame_pos += webrtc::DesktopFrame::kBytesPerPixel;
    }
  }

  int area = rect.width() * rect.height();
  RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
  return rgb_value;
}
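// Averaging example for CalculateAverageColorValue() above: a 2x2 |rect| whose
// pixels are (255, 0, 0), (255, 0, 0), (0, 0, 255) and (0, 0, 255) yields
// RGBValue(127, 0, 127) after the integer division by |area|.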

bool TestVideoRenderer::Core::ExpectedAverageColorIsMatched(
    const RGBValue& candidate_avg_value) const {
  RGBValue expected_avg_value = ConvertUint32ToRGBValue(expected_avg_color_);
  double error_sum_squares = 0;
  double red_error = expected_avg_value.red - candidate_avg_value.red;
  double green_error = expected_avg_value.green - candidate_avg_value.green;
  double blue_error = expected_avg_value.blue - candidate_avg_value.blue;
  error_sum_squares = red_error * red_error + green_error * green_error +
                      blue_error * blue_error;
  error_sum_squares /= (255.0 * 255.0);

  return sqrt(error_sum_squares / 3) < kMaxColorError;
}
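// Worked example for ExpectedAverageColorIsMatched() above: with an expected
// color of 0x0080FF40 (128, 255, 64) and a measured average of (127, 254, 65),
// the per-channel errors are (1, 1, -1), so the normalized RMS error is
// sqrt(3 / (255 * 255 * 3)) = 1 / 255, roughly 0.004, which is below
// kMaxColorError and counts as a match.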

TestVideoRenderer::TestVideoRenderer()
    : video_decode_thread_(
          new base::Thread("TestVideoRendererVideoDecodingThread")),
      weak_factory_(this) {
  DCHECK(thread_checker_.CalledOnValidThread());

  core_.reset(new Core());
  if (!video_decode_thread_->Start()) {
    LOG(ERROR) << "Cannot start TestVideoRenderer";
  } else {
    video_decode_task_runner_ = video_decode_thread_->task_runner();
    video_decode_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&Core::Initialize, base::Unretained(core_.get())));
  }
}

TestVideoRenderer::~TestVideoRenderer() {
  DCHECK(thread_checker_.CalledOnValidThread());

  video_decode_task_runner_->DeleteSoon(FROM_HERE, core_.release());

  // The thread's message loop will run until it runs out of work.
  video_decode_thread_->Stop();
}

void TestVideoRenderer::OnSessionConfig(const protocol::SessionConfig& config) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::OnSessionConfig() Called";
  protocol::ChannelConfig::Codec codec = config.video_config().codec;
  SetCodecForDecoding(codec);
}

ChromotingStats* TestVideoRenderer::GetStats() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetStats() Called";
  return nullptr;
}

protocol::VideoStub* TestVideoRenderer::GetVideoStub() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetVideoStub() Called";
  return this;
}

void TestVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> video_packet,
                                           const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(video_decode_task_runner_) << "Failed to start video decode thread";

  if (video_packet->has_data() && video_packet->data().size() != 0) {
    VLOG(2) << "TestVideoRenderer::ProcessVideoPacket() Called";

    // Post the video process task to the video decode thread.
    base::Closure process_video_task = base::Bind(
        &TestVideoRenderer::Core::ProcessVideoPacket,
        base::Unretained(core_.get()), base::Passed(&video_packet), done);
    video_decode_task_runner_->PostTask(FROM_HERE, process_video_task);
  } else {
    // Log at a high verbosity level as we receive empty packets frequently and
    // they can clutter up the debug output if the level is set too low.
    VLOG(3) << "Empty Video Packet received.";
    done.Run();
  }
}

void TestVideoRenderer::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::SetCodecForDecoding() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&Core::SetCodecForDecoding,
                            base::Unretained(core_.get()), codec));
}

scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
    const {
  DCHECK(thread_checker_.CalledOnValidThread());

  return core_->GetCurrentFrameForTest();
}

void TestVideoRenderer::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    uint32_t expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";

  DVLOG(2) << "TestVideoRenderer::ExpectAverageColorInRect() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Core::ExpectAverageColorInRect, base::Unretained(core_.get()),
                 expected_rect, expected_avg_color,
                 image_pattern_matched_callback));
}
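
// Illustrative usage sketch (not part of this implementation): a connection
// test could feed packets through the VideoStub interface and wait for a
// solid-color rectangle to appear. The |run_loop|, |packet| and |done_closure|
// names below are hypothetical.
//
//   TestVideoRenderer renderer;
//   renderer.SetCodecForDecoding(protocol::ChannelConfig::CODEC_VP8);
//   renderer.ExpectAverageColorInRect(
//       webrtc::DesktopRect::MakeXYWH(0, 0, 100, 100), 0x00FF0000,
//       run_loop.QuitClosure());
//   renderer.GetVideoStub()->ProcessVideoPacket(packet.Pass(), done_closure);
//   run_loop.Run();  // Returns once the red rectangle is matched.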

}  // namespace test
}  // namespace remoting