chrome/browser/extensions/api/cast_streaming/cast_streaming_apitest.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>
#include <cmath>
#include <vector>

#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/memory/scoped_ptr.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/switches.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/net_utility.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_server_socket.h"
#include "testing/gtest/include/gtest/gtest.h"

using media::cast::test::GetFreeLocalPort;

namespace extensions {

class CastStreamingApiTest : public ExtensionApiTest {
 public:
  void SetUpCommandLine(base::CommandLine* command_line) override {
    ExtensionApiTest::SetUpCommandLine(command_line);
    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        "ddchlicdkolnonkihahngkmmmjnjlkkf");
    command_line->AppendSwitchASCII(::switches::kWindowSize, "300,300");
  }
};

// Test running the test extension for Cast Mirroring API.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Stats) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stats.html")) << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
      << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, DestinationNotSet) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "destination_not_set.html"))
      << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, StopNoStart) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stop_no_start.html"))
      << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, NullStream) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "null_stream.html"))
      << message_;
}

namespace {

struct YUVColor {
  int y;
  int u;
  int v;

  YUVColor() : y(0), u(0), v(0) {}
  YUVColor(int y_val, int u_val, int v_val) : y(y_val), u(u_val), v(v_val) {}
};

media::cast::FrameReceiverConfig WithFakeAesKeyAndIv(
    media::cast::FrameReceiverConfig config) {
  config.aes_key = "0123456789abcdef";
  config.aes_iv_mask = "fedcba9876543210";
  return config;
}
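
// Note: the fake key and IV mask above are each 16 characters (128 bits),
// matching the AES-128 frame crypto used by Cast streaming. The
// MAYBE_EndToEnd test below hex-encodes these same bytes into the sender page
// URL (aesKey=..., aesIvMask=...), which is presumably how the sender and this
// in-process receiver end up agreeing on the crypto parameters.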

// An in-process Cast receiver that examines the audio/video frames being
// received for expected colors and tones. Used in
// CastStreamingApiTest.EndToEnd, below.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(
            cast_environment,
            local_end_point,
            net::IPEndPoint(),
            WithFakeAesKeyAndIv(media::cast::GetDefaultAudioReceiverConfig()),
            WithFakeAesKeyAndIv(media::cast::GetDefaultVideoReceiverConfig())) {
  }

  ~TestPatternReceiver() override {}

  void AddExpectedTone(int tone_frequency) {
    expected_tones_.push_back(tone_frequency);
  }

  void AddExpectedColor(const YUVColor& yuv_color) {
    expected_yuv_colors_.push_back(yuv_color);
  }

  // Blocks the caller until all expected tones and colors have been observed.
  void WaitForExpectedTonesAndColors() {
    base::RunLoop run_loop;
    cast_env()->PostTask(
        media::cast::CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(&TestPatternReceiver::NotifyOnceObservedAllTonesAndColors,
                   base::Unretained(this),
                   media::BindToCurrentLoop(run_loop.QuitClosure())));
    run_loop.Run();
  }

 private:
  void NotifyOnceObservedAllTonesAndColors(const base::Closure& done_callback) {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
    done_callback_ = done_callback;
    MaybeRunDoneCallback();
  }

  void MaybeRunDoneCallback() {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
    if (done_callback_.is_null())
      return;
    if (expected_tones_.empty() && expected_yuv_colors_.empty()) {
      base::ResetAndReturn(&done_callback_).Run();
    } else {
      LOG(INFO) << "Waiting to encounter " << expected_tones_.size()
                << " more tone(s) and " << expected_yuv_colors_.size()
                << " more color(s).";
    }
  }
155 // Invoked by InProcessReceiver for each received audio frame.
156 void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
157 const base::TimeTicks& playout_time,
158 bool is_continuous) override {
159 DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
161 if (audio_frame->frames() <= 0) {
162 NOTREACHED() << "OnAudioFrame called with no samples?!?";
163 return;
166 if (done_callback_.is_null() || expected_tones_.empty())
167 return; // No need to waste CPU doing analysis on the signal.
169 // Assume the audio signal is a single sine wave (it can have some
170 // low-amplitude noise). Count zero crossings, and extrapolate the
171 // frequency of the sine wave in |audio_frame|.
172 int crossings = 0;
173 for (int ch = 0; ch < audio_frame->channels(); ++ch) {
174 crossings += media::cast::CountZeroCrossings(audio_frame->channel(ch),
175 audio_frame->frames());
177 crossings /= audio_frame->channels(); // Take the average.
178 const float seconds_per_frame =
179 audio_frame->frames() / static_cast<float>(audio_config().rtp_timebase);
180 const float frequency = crossings / seconds_per_frame / 2.0f;
181 VLOG(1) << "Current audio tone frequency: " << frequency;
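
    // A rough sanity check of the arithmetic above: a pure sine wave crosses
    // zero twice per cycle, so frequency ~= crossings / (2 * duration). For
    // example (illustrative numbers only, the real buffer size depends on the
    // receiver configuration): with a 48000 Hz rtp_timebase and a 480-sample
    // buffer, duration is 0.01 s, and a 200 Hz tone would yield roughly 4
    // zero crossings per channel.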

    const int kTargetWindowHz = 20;
    for (std::vector<int>::iterator it = expected_tones_.begin();
         it != expected_tones_.end(); ++it) {
      if (abs(static_cast<int>(frequency) - *it) < kTargetWindowHz) {
        LOG(INFO) << "Heard tone at frequency " << *it << " Hz.";
        expected_tones_.erase(it);
        MaybeRunDoneCallback();
        break;
      }
    }
  }

  void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) override {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    CHECK(video_frame->format() == media::PIXEL_FORMAT_YV12 ||
          video_frame->format() == media::PIXEL_FORMAT_I420 ||
          video_frame->format() == media::PIXEL_FORMAT_YV12A);

    if (done_callback_.is_null() || expected_yuv_colors_.empty())
      return;  // No need to waste CPU doing analysis on the frame.

    // Take the median value of each plane because the test image will contain
    // a letterboxed content region of mostly a solid color plus a small piece
    // of "something" that's animating to keep the tab capture pipeline
    // generating new frames.
    const gfx::Rect region = FindLetterboxedContentRegion(video_frame.get());
    YUVColor current_color;
    current_color.y = ComputeMedianIntensityInRegionInPlane(
        region,
        video_frame->stride(media::VideoFrame::kYPlane),
        video_frame->data(media::VideoFrame::kYPlane));
    current_color.u = ComputeMedianIntensityInRegionInPlane(
        gfx::ScaleToEnclosedRect(region, 0.5f),
        video_frame->stride(media::VideoFrame::kUPlane),
        video_frame->data(media::VideoFrame::kUPlane));
    current_color.v = ComputeMedianIntensityInRegionInPlane(
        gfx::ScaleToEnclosedRect(region, 0.5f),
        video_frame->stride(media::VideoFrame::kVPlane),
        video_frame->data(media::VideoFrame::kVPlane));
    VLOG(1) << "Current video color: yuv(" << current_color.y << ", "
            << current_color.u << ", " << current_color.v << ')';

    const int kTargetWindow = 10;
    for (std::vector<YUVColor>::iterator it = expected_yuv_colors_.begin();
         it != expected_yuv_colors_.end(); ++it) {
      if (abs(current_color.y - it->y) < kTargetWindow &&
          abs(current_color.u - it->u) < kTargetWindow &&
          abs(current_color.v - it->v) < kTargetWindow) {
        LOG(INFO) << "Saw color yuv(" << it->y << ", " << it->u << ", "
                  << it->v << ").";
        expected_yuv_colors_.erase(it);
        MaybeRunDoneCallback();
        break;
      }
    }
  }

  // Return the region that excludes the black letterboxing borders surrounding
  // the content within |frame|, if any.
  static gfx::Rect FindLetterboxedContentRegion(
      const media::VideoFrame* frame) {
    const int kNonBlackIntensityThreshold = 20;  // 16 plus some fuzz.
    const int width = frame->row_bytes(media::VideoFrame::kYPlane);
    const int height = frame->rows(media::VideoFrame::kYPlane);
    const int stride = frame->stride(media::VideoFrame::kYPlane);

    gfx::Rect result;
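
    // The two scans below appear to assume the non-black content is a single
    // axis-aligned rectangle surrounded by black borders: the first scan
    // (from the bottom-right) fixes the lower-right corner of |result|, and
    // the second scan (from the upper-left) fixes its upper-left corner.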

    // Scan from the bottom-right until the first non-black pixel is
    // encountered.
    for (int y = height - 1; y >= 0; --y) {
      const uint8* const start =
          frame->data(media::VideoFrame::kYPlane) + y * stride;
      const uint8* const end = start + width;
      for (const uint8* p = end - 1; p >= start; --p) {
        if (*p > kNonBlackIntensityThreshold) {
          result.set_width(p - start + 1);
          result.set_height(y + 1);
          y = 0;  // Discontinue outer loop.
          break;
        }
      }
    }

    // Scan from the upper-left until the first non-black pixel is encountered.
    for (int y = 0; y < result.height(); ++y) {
      const uint8* const start =
          frame->data(media::VideoFrame::kYPlane) + y * stride;
      const uint8* const end = start + result.width();
      for (const uint8* p = start; p < end; ++p) {
        if (*p > kNonBlackIntensityThreshold) {
          result.set_x(p - start);
          result.set_width(result.width() - result.x());
          result.set_y(y);
          result.set_height(result.height() - result.y());
          y = result.height();  // Discontinue outer loop.
          break;
        }
      }
    }

    return result;
  }

  static uint8 ComputeMedianIntensityInRegionInPlane(const gfx::Rect& region,
                                                     int stride,
                                                     const uint8* data) {
    if (region.IsEmpty())
      return 0;
    const size_t num_values = region.size().GetArea();
    scoped_ptr<uint8[]> values(new uint8[num_values]);
    for (int y = 0; y < region.height(); ++y) {
      memcpy(values.get() + y * region.width(),
             data + (region.y() + y) * stride + region.x(),
             region.width());
    }
    const size_t middle_idx = num_values / 2;
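    // std::nth_element only partially sorts |values| so that the element at
    // |middle_idx| lands in its final sorted position; this is an average
    // O(n) selection, cheaper than fully sorting the region just to read off
    // the median intensity.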
    std::nth_element(values.get(),
                     values.get() + middle_idx,
                     values.get() + num_values);
    return values[middle_idx];
  }

  std::vector<int> expected_tones_;
  std::vector<YUVColor> expected_yuv_colors_;
  base::Closure done_callback_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};

}  // namespace

class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
  void SetUp() override {
    EnablePixelOutput();
    CastStreamingApiTest::SetUp();
  }

  void SetUpCommandLine(base::CommandLine* command_line) override {
    command_line->AppendSwitchASCII(::switches::kWindowSize, "128,128");
    CastStreamingApiTest::SetUpCommandLine(command_line);
  }
};

// Tests the Cast streaming API and its basic functionality end-to-end. An
// extension subtest is run to generate test content, capture that content, and
// use the API to send it out. At the same time, this test launches an
// in-process Cast receiver, listening on a localhost UDP socket, to receive the
// content and check whether it matches expectations.
//
// TODO(miu): Now that this test has been long-stable on Release build bots, it
// should be enabled for the Debug build bots. http://crbug.com/396413
#if defined(NDEBUG)
#define MAYBE_EndToEnd EndToEnd
#else
#define MAYBE_EndToEnd DISABLED_EndToEnd
#endif
IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, MAYBE_EndToEnd) {
  scoped_ptr<net::UDPServerSocket> receive_socket(
      new net::UDPServerSocket(NULL, net::NetLog::Source()));
  receive_socket->AllowAddressReuse();
  ASSERT_EQ(net::OK, receive_socket->Listen(GetFreeLocalPort()));
  net::IPEndPoint receiver_end_point;
  ASSERT_EQ(net::OK, receive_socket->GetLocalAddress(&receiver_end_point));
  receive_socket.reset();
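  // The socket above exists only to discover a free localhost UDP port: once
  // the port has been captured in |receiver_end_point|, the socket is closed
  // so that the in-process Cast receiver created below can bind to the same
  // endpoint. (There is a small window in which another process could grab
  // the port, which appears to be an accepted risk for this test.)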

  // Start the in-process receiver that examines audio/video for the expected
  // test patterns.
  const scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
      new media::cast::StandaloneCastEnvironment());
  TestPatternReceiver* const receiver =
      new TestPatternReceiver(cast_environment, receiver_end_point);

  // Launch the page that: 1) renders the source content; 2) uses the
  // chrome.tabCapture and chrome.cast.streaming APIs to capture its content
  // and stream using Cast; and 3) calls chrome.test.succeed() once it is
  // operational.
  const std::string page_url = base::StringPrintf(
      "end_to_end_sender.html?port=%d&aesKey=%s&aesIvMask=%s",
      receiver_end_point.port(),
      base::HexEncode(receiver->audio_config().aes_key.data(),
                      receiver->audio_config().aes_key.size()).c_str(),
      base::HexEncode(receiver->audio_config().aes_iv_mask.data(),
                      receiver->audio_config().aes_iv_mask.size()).c_str());
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;

  // Examine the Cast receiver for expected audio/video test patterns. The
  // colors and tones specified here must match those in end_to_end_sender.js.
  // Note that we do not check that the color and tone are received
  // simultaneously since A/V sync should be measured in perf tests.
  receiver->AddExpectedTone(200 /* Hz */);
  receiver->AddExpectedTone(500 /* Hz */);
  receiver->AddExpectedTone(1800 /* Hz */);
  receiver->AddExpectedColor(YUVColor(82, 90, 240));   // rgb(255, 0, 0)
  receiver->AddExpectedColor(YUVColor(145, 54, 34));   // rgb(0, 255, 0)
  receiver->AddExpectedColor(YUVColor(41, 240, 110));  // rgb(0, 0, 255)
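  // For reference, these YUV triplets are approximately the BT.601
  // limited-range ("studio swing") equivalents of the pure RGB primaries,
  // e.g. for red: Y = 16 + 0.257 * 255, which is about 82, matching
  // YUVColor(82, 90, 240) above. The kTargetWindow tolerance of +/-10 in
  // OnVideoFrame() presumably absorbs rounding and color-conversion
  // differences along the capture pipeline.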
  receiver->Start();
  receiver->WaitForExpectedTonesAndColors();
  receiver->Stop();

  delete receiver;
  cast_environment->Shutdown();
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, RtpStreamError) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "rtp_stream_error.html"));
}

}  // namespace extensions