remoting/test/test_video_renderer.cc

// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
5 #include "remoting/test/test_video_renderer.h"
7 #include "base/bind.h"
8 #include "base/callback_helpers.h"
9 #include "base/logging.h"
10 #include "base/synchronization/lock.h"
11 #include "base/thread_task_runner_handle.h"
12 #include "base/threading/thread.h"
13 #include "remoting/codec/video_decoder.h"
14 #include "remoting/codec/video_decoder_verbatim.h"
15 #include "remoting/codec/video_decoder_vpx.h"
16 #include "remoting/proto/video.pb.h"
17 #include "remoting/test/rgb_value.h"
18 #include "remoting/test/video_frame_writer.h"
19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"

namespace {

// Used to account for frame resizing and lossy encoding error in percentage.
// The average color usually only varies by 1 on each channel, so 0.01 is large
// enough to allow variations while not being flaky for false negative cases.
const double kMaxColorError = 0.01;

}  // namespace

namespace remoting {
namespace test {

// Implements video decoding functionality.
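// Core methods run on the video decoding thread owned by TestVideoRenderer
// (enforced by |thread_checker_|), except GetCurrentFrameForTest(), which is
// called from the main thread and therefore copies |frame_| under |lock_|.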
class TestVideoRenderer::Core {
 public:
  Core();
  ~Core();

  // Initializes the internal structures of the class.
  void Initialize();

  // Used to decode video packets.
  void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
                          const base::Closure& done);

  // Initializes a decoder to decode video packets.
  void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);

  // Returns a copy of the current frame.
  scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;

  // Sets the expected image pattern for comparison; the callback will be run
  // when the pattern is matched.
  void ExpectAverageColorInRect(
      const webrtc::DesktopRect& expected_rect,
      const RGBValue& expected_avg_color,
      const base::Closure& image_pattern_matched_callback);

  // Turns on/off saving video frames to disk.
  void save_frame_data_to_disk(bool save_frame_data_to_disk) {
    save_frame_data_to_disk_ = save_frame_data_to_disk;
  }

 private:
  // Returns the average color of pixels that fall within |rect| on the
  // current frame.
  RGBValue CalculateAverageColorValue(const webrtc::DesktopRect& rect) const;

  // Compares |candidate_avg_value| to |expected_avg_color_|.
  // Returns true if the root mean square of the errors in the R, G and B
  // components does not exceed a given limit.
  bool ExpectedAverageColorIsMatched(const RGBValue& candidate_avg_value) const;

  // Used to ensure Core methods are called on the same thread.
  base::ThreadChecker thread_checker_;

  // Used to decode video packets.
  scoped_ptr<VideoDecoder> decoder_;

  // Updated region of the current desktop frame compared to the previous one.
  webrtc::DesktopRegion updated_region_;

  // Screen size of the remote host.
  webrtc::DesktopSize screen_size_;

  // Used to post tasks back to the main thread.
  scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;

  // Used to store the decoded video frame.
  scoped_ptr<webrtc::DesktopFrame> frame_;

  // Protects access to |frame_|.
  mutable base::Lock lock_;

  // Used to store the expected image pattern.
  webrtc::DesktopRect expected_rect_;
  RGBValue expected_avg_color_;

  // Used to store the callback to run when the expected pattern is matched.
  base::Closure image_pattern_matched_callback_;

  // Used to indicate whether to save frame data to disk.
  bool save_frame_data_to_disk_;

  // Used to dump video frames and generate image patterns.
  VideoFrameWriter video_frame_writer;

  DISALLOW_COPY_AND_ASSIGN(Core);
};

TestVideoRenderer::Core::Core()
    : main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      save_frame_data_to_disk_(false) {
  thread_checker_.DetachFromThread();
}

TestVideoRenderer::Core::~Core() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::Initialize() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

void TestVideoRenderer::Core::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (decoder_) {
    LOG(WARNING) << "Decoder is set more than once";
  }

  switch (codec) {
    case protocol::ChannelConfig::CODEC_VP8: {
      VLOG(1) << "Test Video Renderer will use VP8 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP8();
      break;
    }
    case protocol::ChannelConfig::CODEC_VP9: {
      VLOG(1) << "Test Video Renderer will use VP9 decoder";
      decoder_ = VideoDecoderVpx::CreateForVP9();
      break;
    }
    case protocol::ChannelConfig::CODEC_VERBATIM: {
      VLOG(1) << "Test Video Renderer will use VERBATIM decoder";
      decoder_.reset(new VideoDecoderVerbatim());
      break;
    }
    default: {
      NOTREACHED() << "Unsupported codec: " << codec;
    }
  }
}
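
// Called from the main thread (see TestVideoRenderer's
// GetCurrentFrameForTest()), hence the lock while copying the frame that the
// decoding thread renders into.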
scoped_ptr<webrtc::DesktopFrame>
TestVideoRenderer::Core::GetCurrentFrameForTest() const {
  base::AutoLock auto_lock(lock_);
  DCHECK(frame_);
  return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
}
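
// Decodes |packet| into |frame_|, notifies |done| on the main thread, then
// optionally dumps the frame to disk and checks it against the expected image
// pattern registered via ExpectAverageColorInRect().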
void TestVideoRenderer::Core::ProcessVideoPacket(
    scoped_ptr<VideoPacket> packet, const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(decoder_);
  DCHECK(packet);

  VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called";

  // Screen size is attached on the first packet as well as when the
  // host screen is resized.
  if (packet->format().has_screen_width() &&
      packet->format().has_screen_height()) {
    webrtc::DesktopSize source_size(packet->format().screen_width(),
                                    packet->format().screen_height());
    if (!screen_size_.equals(source_size)) {
      screen_size_ = source_size;
      decoder_->Initialize(screen_size_);
      frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
    }
  }

  // To make life easier, assume that the desktop shape is a single rectangle.
  packet->clear_use_desktop_shape();
  if (!decoder_->DecodePacket(*packet.get())) {
    LOG(ERROR) << "Decoder::DecodePacket() failed.";
    return;
  }

  {
    base::AutoLock auto_lock(lock_);

    // Render the decoded packet and write the results to the buffer. Note
    // that |updated_region_| maintains the changed regions compared to the
    // previous video frame.
    decoder_->RenderFrame(screen_size_,
                          webrtc::DesktopRect::MakeWH(screen_size_.width(),
                                                      screen_size_.height()),
                          frame_->data(), frame_->stride(), &updated_region_);
  }

  main_task_runner_->PostTask(FROM_HERE, done);

  if (save_frame_data_to_disk_) {
    scoped_ptr<webrtc::DesktopFrame> frame(
        webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
    video_frame_writer.HighlightRectInFrame(frame.get(), expected_rect_);
    video_frame_writer.WriteFrameToDefaultPath(*frame.get());
  }

  // Check whether an image pattern matched callback has been registered and
  // whether |expected_rect_| falls within the current frame.
  if (image_pattern_matched_callback_.is_null() ||
      expected_rect_.right() > frame_->size().width() ||
      expected_rect_.bottom() > frame_->size().height()) {
    return;
  }

  // Compare the expected image pattern with the corresponding rectangular
  // region on the current frame.
  RGBValue accumulating_avg_value = CalculateAverageColorValue(expected_rect_);
  if (ExpectedAverageColorIsMatched(accumulating_avg_value)) {
    main_task_runner_->PostTask(
        FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
  }
}
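
// The expectation is stored here and evaluated against each subsequently
// decoded frame in ProcessVideoPacket().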
void TestVideoRenderer::Core::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    const RGBValue& expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());

  expected_rect_ = expected_rect;
  expected_avg_color_ = expected_avg_color;
  image_pattern_matched_callback_ = image_pattern_matched_callback;
}

RGBValue TestVideoRenderer::Core::CalculateAverageColorValue(
    const webrtc::DesktopRect& rect) const {
  int red_sum = 0;
  int green_sum = 0;
  int blue_sum = 0;

  // Loop through the pixels that fall within |rect| to obtain the average
  // color value.
  for (int y = rect.top(); y < rect.bottom(); ++y) {
    uint8_t* frame_pos =
        frame_->data() + (y * frame_->stride() +
                          rect.left() * webrtc::DesktopFrame::kBytesPerPixel);

    // Pixels of the decoded video frame are presented in ARGB format: the
    // blue, green and red bytes are at offsets 0, 1 and 2, and each pixel
    // occupies 4 bytes.
    for (int x = 0; x < rect.width(); ++x) {
      red_sum += frame_pos[2];
      green_sum += frame_pos[1];
      blue_sum += frame_pos[0];
      frame_pos += 4;
    }
  }

  int area = rect.width() * rect.height();
  RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
  return rgb_value;
}
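
// The per-channel errors are normalized to [0, 1] before taking the root
// mean square, so kMaxColorError (0.01) corresponds to an average drift of
// roughly 2.5 levels per 8-bit channel.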
bool TestVideoRenderer::Core::ExpectedAverageColorIsMatched(
    const RGBValue& candidate_avg_value) const {
  double error_sum_squares = 0;
  double red_error = expected_avg_color_.red - candidate_avg_value.red;
  double green_error = expected_avg_color_.green - candidate_avg_value.green;
  double blue_error = expected_avg_color_.blue - candidate_avg_value.blue;
  error_sum_squares = red_error * red_error + green_error * green_error +
                      blue_error * blue_error;
  error_sum_squares /= (255.0 * 255.0);

  return sqrt(error_sum_squares / 3) < kMaxColorError;
}
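
// TestVideoRenderer is created and used on a single (main) thread; it owns
// the video decoding thread and forwards work to |core_| via posted tasks.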
TestVideoRenderer::TestVideoRenderer()
    : video_decode_thread_(
          new base::Thread("TestVideoRendererVideoDecodingThread")),
      weak_factory_(this) {
  DCHECK(thread_checker_.CalledOnValidThread());

  core_.reset(new Core());
  if (!video_decode_thread_->Start()) {
    LOG(ERROR) << "Cannot start TestVideoRenderer";
  } else {
    video_decode_task_runner_ = video_decode_thread_->task_runner();
    video_decode_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&Core::Initialize, base::Unretained(core_.get())));
  }
}

TestVideoRenderer::~TestVideoRenderer() {
  DCHECK(thread_checker_.CalledOnValidThread());
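
  // Core's ThreadChecker is bound to the decode thread, so |core_| must be
  // destroyed there; DeleteSoon() posts its deletion to that thread.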
  video_decode_task_runner_->DeleteSoon(FROM_HERE, core_.release());

  // The thread's message loop will run until it runs out of work.
  video_decode_thread_->Stop();
}

void TestVideoRenderer::OnSessionConfig(const protocol::SessionConfig& config) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::OnSessionConfig() Called";
  protocol::ChannelConfig::Codec codec = config.video_config().codec;
  SetCodecForDecoding(codec);
}

ChromotingStats* TestVideoRenderer::GetStats() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetStats() Called";
  return nullptr;
}

protocol::VideoStub* TestVideoRenderer::GetVideoStub() {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::GetVideoStub() Called";
  return this;
}

void TestVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> video_packet,
                                           const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(video_decode_task_runner_) << "Failed to start video decode thread";

  if (video_packet->has_data() && video_packet->data().size() != 0) {
    VLOG(2) << "Process video packet is called!";

    // Post the video processing task to the video decode thread. base::Passed
    // transfers ownership of |video_packet| to the bound task.
    base::Closure process_video_task = base::Bind(
        &TestVideoRenderer::Core::ProcessVideoPacket,
        base::Unretained(core_.get()), base::Passed(&video_packet), done);
    video_decode_task_runner_->PostTask(FROM_HERE, process_video_task);
  } else {
    // Log at a high verbosity level as we receive empty packets frequently and
    // they can clutter up the debug output if the level is set too low.
    VLOG(3) << "Empty Video Packet received.";
    done.Run();
  }
}

void TestVideoRenderer::SetCodecForDecoding(
    const protocol::ChannelConfig::Codec codec) {
  DCHECK(thread_checker_.CalledOnValidThread());

  VLOG(2) << "TestVideoRenderer::SetCodecForDecoding() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&Core::SetCodecForDecoding,
                            base::Unretained(core_.get()),
                            codec));
}

scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
    const {
  DCHECK(thread_checker_.CalledOnValidThread());

  return core_->GetCurrentFrameForTest();
}

void TestVideoRenderer::ExpectAverageColorInRect(
    const webrtc::DesktopRect& expected_rect,
    const RGBValue& expected_avg_color,
    const base::Closure& image_pattern_matched_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";

  DVLOG(2) << "TestVideoRenderer::ExpectAverageColorInRect() Called";
  video_decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Core::ExpectAverageColorInRect, base::Unretained(core_.get()),
                 expected_rect, expected_avg_color,
                 image_pattern_matched_callback));
}

void TestVideoRenderer::SaveFrameDataToDisk(bool save_frame_data_to_disk) {
  DCHECK(thread_checker_.CalledOnValidThread());

  video_decode_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Core::save_frame_data_to_disk, base::Unretained(core_.get()),
                 save_frame_data_to_disk));
}

}  // namespace test
}  // namespace remoting