// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <map>
#include <vector>

#include "base/basictypes.h"
#include "base/command_line.h"
#include "base/strings/stringprintf.h"
#include "base/test/trace_event_analyzer.h"
#include "base/time/default_tick_clock.h"
#include "base/win/windows_version.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/extensions/tab_helper.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/exclusive_access/fullscreen_controller.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/test/base/test_launcher_utils.h"
#include "chrome/test/base/test_switches.h"
#include "chrome/test/base/tracing.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/feature_switch.h"
#include "extensions/common/features/base_feature_provider.h"
#include "extensions/common/features/complex_feature.h"
#include "extensions/common/features/feature.h"
#include "extensions/common/features/simple_feature.h"
#include "extensions/common/switches.h"
#include "extensions/test/extension_test_message_listener.h"
#include "media/base/audio_bus.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/barcode.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/test/utility/udp_proxy.h"
#include "net/base/ip_endpoint.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_server_socket.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"

namespace {

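// ID of the test extension; it is passed to the whitelisted-extension-id
// switch in SetUpCommandLine() below.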
const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";

// Skip a few events from the beginning.
static const size_t kSkipEvents = 3;

enum TestFlags {
  kUseGpu = 1 << 0,        // Only execute test if --enable-gpu was given
                           // on the command line. This is required for
                           // tests that run on GPU.
  kDisableVsync = 1 << 1,  // Do not limit framerate to vertical refresh
                           // when on GPU, nor to 60 Hz when not on GPU.
  kSmallWindow = 1 << 2,   // 1 = 800x600, 0 = 2000x1500
  k24fps = 1 << 3,         // Use 24 fps video.
  k30fps = 1 << 4,         // Use 30 fps video.
  k60fps = 1 << 5,         // Use 60 fps video.
  kProxyWifi = 1 << 6,     // Run UDP through UDPProxy wifi profile.
  kProxyBad = 1 << 7,      // Run UDP through UDPProxy bad profile.
  kSlowClock = 1 << 8,     // Receiver clock is 10 seconds slow.
  kFastClock = 1 << 9,     // Receiver clock is 10 seconds fast.
};

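// A tick clock that reports the current time shifted by a fixed offset.
// Used to simulate a receiver whose clock runs ahead of or behind the
// sender's clock (see kSlowClock / kFastClock above).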
class SkewedTickClock : public base::DefaultTickClock {
 public:
  explicit SkewedTickClock(const base::TimeDelta& delta) : delta_(delta) {
  }
  base::TimeTicks NowTicks() override {
    return DefaultTickClock::NowTicks() + delta_;
  }
 private:
  base::TimeDelta delta_;
};

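// A StandaloneCastEnvironment whose clock is skewed by |delta|, used by the
// kSlowClock / kFastClock test variants to exercise clock drift between
// sender and receiver.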
class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment {
 public:
  explicit SkewedCastEnvironment(const base::TimeDelta& delta) :
      StandaloneCastEnvironment() {
    clock_.reset(new SkewedTickClock(delta));
  }

 protected:
  ~SkewedCastEnvironment() override {}
};

// We log one of these for each call to OnAudioFrame/OnVideoFrame.
struct TimeData {
  TimeData(uint16 frame_no_, base::TimeTicks render_time_) :
      frame_no(frame_no_),
      render_time(render_time_) {
  }
  // The unit here is video frames, for audio data there can be duplicates.
  // This was decoded from the actual audio/video data.
  uint16 frame_no;
  // This is when we should play this data, according to the sender.
  base::TimeTicks render_time;
};

// TODO(hubbe): Move to media/cast to use for offline log analysis.
class MeanAndError {
 public:
  MeanAndError() {}
  explicit MeanAndError(const std::vector<double>& values) {
    double sum = 0.0;
    double sqr_sum = 0.0;
    num_values = values.size();
    if (num_values) {
      for (size_t i = 0; i < num_values; i++) {
        sum += values[i];
        sqr_sum += values[i] * values[i];
      }
      mean = sum / num_values;
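      // Population standard deviation via the identity
      // Var[x] = E[x^2] - E[x]^2; the max() guards against a slightly
      // negative argument caused by floating-point rounding.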
      std_dev = sqrt(std::max(0.0, num_values * sqr_sum - sum * sum)) /
          num_values;
    }
  }

  std::string AsString() const {
    return base::StringPrintf("%f,%f", mean, std_dev);
  }

  void Print(const std::string& measurement,
             const std::string& modifier,
             const std::string& trace,
             const std::string& unit) {
    if (num_values >= 20) {
      perf_test::PrintResultMeanAndError(measurement,
                                         modifier,
                                         trace,
                                         AsString(),
                                         unit,
                                         true);
    } else {
      LOG(ERROR) << "Not enough events for "
                 << measurement << modifier << " " << trace;
    }
  }

  size_t num_values;
  double mean;
  double std_dev;
};

// This function checks how smooth the data in |data| is.
// It computes the average error of deltas and the average delta.
// If data[x] == x * A + B, then this function returns zero.
// The unit is milliseconds.
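// For example (hypothetical numbers): render times of 0, 33, 66 and 100 ms
// give deltas of 33, 33 and 34 ms, a mean delta of ~33.3 ms, and absolute
// deviations of 0.3, 0.3 and 0.7 ms, so the reported jitter is ~0.4 ms.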
static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
  CHECK_GT(data.size(), 1UL);
  VLOG(0) << "Jitter analysis on " << data.size() << " values.";
  std::vector<double> deltas;
  double sum = 0.0;
  for (size_t i = 1; i < data.size(); i++) {
    double delta = (data[i].render_time -
                    data[i - 1].render_time).InMillisecondsF();
    deltas.push_back(delta);
    sum += delta;
  }
  double mean = sum / deltas.size();
  for (size_t i = 0; i < deltas.size(); i++) {
    deltas[i] = fabs(mean - deltas[i]);
  }

  return MeanAndError(deltas);
}

// An in-process Cast receiver that examines the audio/video frames being
// received and logs some data about each one.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          net::IPEndPoint(),
                          media::cast::GetDefaultAudioReceiverConfig(),
                          media::cast::GetDefaultVideoReceiverConfig()) {
  }

  typedef std::map<uint16, base::TimeTicks> TimeMap;

  // Build a map from frame ID (as encoded in the audio and video data)
  // to the playout/render time for that frame. Note that there will be
  // multiple audio frames which all have the same frame ID. When that
  // happens we want the minimum timestamp, because that audio frame is
  // supposed to play at the same time that the corresponding image is
  // presented.
  void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) {
    for (size_t i = kSkipEvents; i < events.size(); i++) {
      base::TimeTicks& frame_tick = (*map)[events[i].frame_no];
      if (frame_tick.is_null()) {
        frame_tick = events[i].render_time;
      } else {
        frame_tick = std::min(events[i].render_time, frame_tick);
      }
    }
  }

  void Analyze(const std::string& name, const std::string& modifier) {
    // First, find the minimum render time for each audio and video frame.
    // Note that the data encoded in the audio stream contains video frame
    // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then
    // 1/30s of "2", etc.
    TimeMap audio_frame_times, video_frame_times;
    MapFrameTimes(audio_events_, &audio_frame_times);
    MapFrameTimes(video_events_, &video_frame_times);
    std::vector<double> deltas;
    for (TimeMap::const_iterator i = audio_frame_times.begin();
         i != audio_frame_times.end();
         ++i) {
      TimeMap::const_iterator j = video_frame_times.find(i->first);
      if (j != video_frame_times.end()) {
        deltas.push_back((i->second - j->second).InMillisecondsF());
      }
    }

    // Close to zero is better. (Can be negative.)
    MeanAndError(deltas).Print(name, modifier, "av_sync", "ms");
    // Lower is better.
    AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms");
    // Lower is better.
    AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms");
  }

 private:
  // Invoked by InProcessReceiver for each received audio frame.
  void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    // Note: This is the number of the video frame that this audio belongs to.
    uint16 frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      audio_events_.push_back(TimeData(frame_no, playout_time));
    } else {
      VLOG(0) << "Failed to decode audio timestamp!";
    }
  }

  void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                    const base::TimeTicks& render_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    TRACE_EVENT_INSTANT1(
        "mirroring", "TestPatternReceiver::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "render_time", render_time.ToInternalValue());

    uint16 frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_events_.push_back(TimeData(frame_no, render_time));
    } else {
      VLOG(0) << "Failed to decode barcode!";
    }
  }

  std::vector<TimeData> audio_events_;
  std::vector<TimeData> video_events_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};

class CastV2PerformanceTest
    : public ExtensionApiTest,
      public testing::WithParamInterface<int> {
 public:
  CastV2PerformanceTest() {}

  bool HasFlag(TestFlags flag) const {
    return (GetParam() & flag) == flag;
  }

  bool IsGpuAvailable() const {
    return base::CommandLine::ForCurrentProcess()->HasSwitch("enable-gpu");
  }

  std::string GetSuffixForTestFlags() {
    std::string suffix;
    if (HasFlag(kUseGpu))
      suffix += "_gpu";
    if (HasFlag(kDisableVsync))
      suffix += "_novsync";
    if (HasFlag(kSmallWindow))
      suffix += "_small";
    if (HasFlag(k24fps))
      suffix += "_24fps";
    if (HasFlag(k30fps))
      suffix += "_30fps";
    if (HasFlag(k60fps))
      suffix += "_60fps";
    if (HasFlag(kProxyWifi))
      suffix += "_wifi";
    if (HasFlag(kProxyBad))
      suffix += "_bad";
    if (HasFlag(kSlowClock))
      suffix += "_slow";
    if (HasFlag(kFastClock))
      suffix += "_fast";
    return suffix;
  }

  int getfps() {
    if (HasFlag(k24fps))
      return 24;
    if (HasFlag(k30fps))
      return 30;
    if (HasFlag(k60fps))
      return 60;
    NOTREACHED();
    return 0;
  }

  net::IPEndPoint GetFreeLocalPort() {
    // Determine an unused UDP port for the in-process receiver to listen on.
    // Method: Bind a UDP socket on port 0, and then check which port the
    // operating system assigned to it.
    net::IPAddressNumber localhost;
    localhost.push_back(127);
    localhost.push_back(0);
    localhost.push_back(0);
    localhost.push_back(1);
    scoped_ptr<net::UDPServerSocket> receive_socket(
        new net::UDPServerSocket(NULL, net::NetLog::Source()));
    receive_socket->AllowAddressReuse();
    CHECK_EQ(net::OK, receive_socket->Listen(net::IPEndPoint(localhost, 0)));
    net::IPEndPoint endpoint;
    CHECK_EQ(net::OK, receive_socket->GetLocalAddress(&endpoint));
    return endpoint;
  }

  void SetUp() override {
    EnablePixelOutput();
    ExtensionApiTest::SetUp();
  }

  void SetUpCommandLine(base::CommandLine* command_line) override {
    // Some of the tests may launch http requests through JSON or AJAX
    // which causes a security error (cross domain request) when the page
    // is loaded from the local file system (file://). The following switch
    // fixes that error.
    command_line->AppendSwitch(switches::kAllowFileAccessFromFiles);

    if (HasFlag(kSmallWindow)) {
      command_line->AppendSwitchASCII(switches::kWindowSize, "800,600");
    } else {
      command_line->AppendSwitchASCII(switches::kWindowSize, "2000,1500");
    }

    if (!HasFlag(kUseGpu))
      command_line->AppendSwitch(switches::kDisableGpu);

    if (HasFlag(kDisableVsync))
      command_line->AppendSwitch(switches::kDisableGpuVsync);

    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        kExtensionId);
    ExtensionApiTest::SetUpCommandLine(command_line);
  }

  void GetTraceEvents(trace_analyzer::TraceAnalyzer* analyzer,
                      const std::string& event_name,
                      trace_analyzer::TraceEventVector* events) {
    trace_analyzer::Query query =
        trace_analyzer::Query::EventNameIs(event_name) &&
        (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE));
    analyzer->FindEvents(query, events);
  }

  // The key contains the name of the argument and its value.
  typedef std::pair<std::string, double> EventMapKey;
  typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap;

  // Make events findable by their arguments. For instance, if an
  // event has a "timestamp": 238724 argument, the map will contain
  // pair<"timestamp", 238724> -> &event. All arguments are indexed.
  void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer,
                   const std::string& event_name,
                   EventMap* event_map) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);
    for (size_t i = 0; i < events.size(); i++) {
      std::map<std::string, double>::const_iterator j;
      for (j = events[i]->arg_numbers.begin();
           j != events[i]->arg_numbers.end();
           ++j) {
        (*event_map)[*j] = events[i];
      }
    }
  }

  // Look up an event in |event_map|. The returned event will have the same
  // value for the argument |key_name| as the most recent event in
  // |prev_events| that has that argument. Note that if |key_name| is
  // "time_delta", then we allow some fuzzy logic since the time deltas are
  // truncated to milliseconds in the code.
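  // (Trace timestamps and time deltas are in microseconds here, so the
  // tolerance of 1000 below corresponds to that 1 ms truncation.)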
  const trace_analyzer::TraceEvent* FindNextEvent(
      const EventMap& event_map,
      std::vector<const trace_analyzer::TraceEvent*> prev_events,
      std::string key_name) {
    EventMapKey key;
    for (size_t i = prev_events.size(); i;) {
      --i;
      std::map<std::string, double>::const_iterator j =
          prev_events[i]->arg_numbers.find(key_name);
      if (j != prev_events[i]->arg_numbers.end()) {
        key = *j;
        break;
      }
    }
    EventMap::const_iterator i = event_map.lower_bound(key);
    if (i == event_map.end())
      return NULL;
    if (i->first.second == key.second)
      return i->second;
    if (key_name != "time_delta")
      return NULL;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    if (i == event_map.begin())
      return NULL;
    i--;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    return NULL;
  }

  // Given a vector of vectors of data, extract the difference between
  // two columns (|col_a| and |col_b|) and output the result as a
  // performance metric.
  void OutputMeasurement(const std::string& test_name,
                         const std::vector<std::vector<double> > data,
                         const std::string& measurement_name,
                         int col_a,
                         int col_b) {
    std::vector<double> tmp;
    for (size_t i = 0; i < data.size(); i++) {
      tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0);
    }
    return MeanAndError(tmp).Print(test_name,
                                   GetSuffixForTestFlags(),
                                   measurement_name,
                                   "ms");
  }

  // Analyzing latency is hard, because there is no unifying identifier for
  // frames throughout the code. At first, we have a capture timestamp, which
  // gets converted to a time delta, then back to a timestamp. Once it enters
  // the cast library it gets converted to an rtp_timestamp, and when it
  // leaves the cast library, all we have is the render_time.
  //
  // To be able to follow the frame throughout all this, we insert TRACE
  // calls that track each conversion as it happens. Then we extract all
  // these events and link them together.
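  //
  // The chain of events we link up is:
  //   Capture -> OnBufferReceived -> MediaStreamVideoSink::OnVideoFrame ->
  //   InsertRawVideoFrame -> VideoFrameEncoded -> PullEncodedVideoFrame ->
  //   FrameDecoded -> TestPatternReceiver::OnVideoFrame
  // where consecutive events are matched on a shared argument (timestamp,
  // time_delta, rtp_timestamp or render_time), as set up in |event_maps|.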
  void AnalyzeLatency(const std::string& test_name,
                      trace_analyzer::TraceAnalyzer* analyzer) {
    EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done;
    IndexEvents(analyzer, "OnBufferReceived", &onbuffer);
    IndexEvents(analyzer, "MediaStreamVideoSink::OnVideoFrame", &sink);
    IndexEvents(analyzer, "InsertRawVideoFrame", &inserted);
    IndexEvents(analyzer, "VideoFrameEncoded", &encoded);
    IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted);
    IndexEvents(analyzer, "FrameDecoded", &decoded);
    IndexEvents(analyzer, "TestPatternReceiver::OnVideoFrame", &done);
    std::vector<std::pair<EventMap*, std::string> > event_maps;
    event_maps.push_back(std::make_pair(&onbuffer, "timestamp"));
    event_maps.push_back(std::make_pair(&sink, "time_delta"));
    event_maps.push_back(std::make_pair(&inserted, "timestamp"));
    event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&done, "render_time"));

    trace_analyzer::TraceEventVector capture_events;
    GetTraceEvents(analyzer, "Capture", &capture_events);
    EXPECT_GT(capture_events.size(), 0UL);
    std::vector<std::vector<double> > traced_frames;
    for (size_t i = kSkipEvents; i < capture_events.size(); i++) {
      std::vector<double> times;
      const trace_analyzer::TraceEvent* event = capture_events[i];
      times.push_back(event->timestamp);  // begin capture
      event = event->other_event;
      if (!event) {
        continue;
      }
      times.push_back(event->timestamp);  // end capture (with timestamp)
      std::vector<const trace_analyzer::TraceEvent*> prev_events;
      prev_events.push_back(event);
      for (size_t j = 0; j < event_maps.size(); j++) {
        event = FindNextEvent(*event_maps[j].first,
                              prev_events,
                              event_maps[j].second);
        if (!event) {
          break;
        }
        prev_events.push_back(event);
        times.push_back(event->timestamp);
      }
      if (event) {
        // Successfully traced frame from beginning to end.
        traced_frames.push_back(times);
      }
    }

    // Columns in |traced_frames|:
    // 0 = capture begin
    // 1 = capture end
    // 2 = onbuffer
    // 3 = sink
    // 4 = inserted
    // 5 = encoded
    // 6 = transmitted
    // 7 = decoded
    // 8 = done

    // Lower is better for all of these.
    OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8);
    OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1);
    OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3);
    OutputMeasurement(test_name, traced_frames, "encode", 3, 5);
    OutputMeasurement(test_name, traced_frames, "transmit", 5, 6);
    OutputMeasurement(test_name, traced_frames, "decode", 6, 7);
    OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8);
  }

  MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer,
                                    const std::string& event_name) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);

    std::vector<double> deltas;
    for (size_t i = kSkipEvents + 1; i < events.size(); ++i) {
      double delta_micros = events[i]->timestamp - events[i - 1]->timestamp;
      deltas.push_back(delta_micros / 1000.0);
    }
    return MeanAndError(deltas);
  }

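  // Runs one end-to-end mirroring session for the current test flags:
  // starts a (possibly clock-skewed) in-process receiver, optionally routes
  // the UDP traffic through a simulated wifi/bad network, runs the
  // cast_streaming extension subtest while tracing, and then reports frame
  // spacing, capture spacing, A/V sync, jitter and per-stage latency.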
  void RunTest(const std::string& test_name) {
    if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
      LOG(WARNING) <<
          "Test skipped: requires gpu. Pass --enable-gpu on the command "
          "line if use of GPU is desired.";
      return;
    }

    // Exactly one frame-rate flag must be set.
    ASSERT_EQ(1,
              (HasFlag(k24fps) ? 1 : 0) +
              (HasFlag(k30fps) ? 1 : 0) +
              (HasFlag(k60fps) ? 1 : 0));

    net::IPEndPoint receiver_end_point = GetFreeLocalPort();

    // Start the in-process receiver that examines audio/video for the
    // expected test patterns.
    base::TimeDelta delta = base::TimeDelta::FromSeconds(0);
    if (HasFlag(kFastClock)) {
      delta = base::TimeDelta::FromSeconds(10);
    }
    if (HasFlag(kSlowClock)) {
      delta = base::TimeDelta::FromSeconds(-10);
    }
    scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
        new SkewedCastEnvironment(delta));
    TestPatternReceiver* const receiver =
        new TestPatternReceiver(cast_environment, receiver_end_point);
    receiver->Start();

    scoped_ptr<media::cast::test::UDPProxy> udp_proxy;
    if (HasFlag(kProxyWifi) || HasFlag(kProxyBad)) {
      net::IPEndPoint proxy_end_point = GetFreeLocalPort();
      if (HasFlag(kProxyWifi)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::WifiNetwork().Pass(),
            media::cast::test::WifiNetwork().Pass(),
            NULL);
      } else if (HasFlag(kProxyBad)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::BadNetwork().Pass(),
            media::cast::test::BadNetwork().Pass(),
            NULL);
      }
      receiver_end_point = proxy_end_point;
    }

    std::string json_events;
    ASSERT_TRUE(tracing::BeginTracing(
        "test_fps,mirroring,gpu.capture,cast_perf_test"));
    const std::string page_url = base::StringPrintf(
        "performance%d.html?port=%d",
        getfps(),
        receiver_end_point.port());
    ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
    ASSERT_TRUE(tracing::EndTracing(&json_events));
    receiver->Stop();

    // Stop all threads; this removes the need for synchronization when
    // analyzing the data.
    cast_environment->Shutdown();
    scoped_ptr<trace_analyzer::TraceAnalyzer> analyzer;
    analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events));
    analyzer->AssociateAsyncBeginEndEvents();

    MeanAndError frame_data = AnalyzeTraceDistance(
        analyzer.get(),
        "OnSwapCompositorFrame");

    EXPECT_GT(frame_data.num_values, 0UL);
    // Lower is better.
    frame_data.Print(test_name,
                     GetSuffixForTestFlags(),
                     "time_between_frames",
                     "ms");

    // This prints out the average time between capture events.
    // As the capture frame rate is capped at 30 fps, this score
    // cannot get any better (lower) than 33.33 ms.
    MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture");
    // Lower is better.
    capture_data.Print(test_name,
                       GetSuffixForTestFlags(),
                       "time_between_captures",
                       "ms");

    receiver->Analyze(test_name, GetSuffixForTestFlags());

    AnalyzeLatency(test_name, analyzer.get());
  }
};

}  // namespace

IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) {
  RunTest("CastV2Performance");
}

// Note: First argument is optional and intentionally left blank.
// (It's a prefix for the generated test cases.)
INSTANTIATE_TEST_CASE_P(
    ,
    CastV2PerformanceTest,
    testing::Values(
        kUseGpu | k24fps,
        kUseGpu | k30fps,
        kUseGpu | k60fps,
        kUseGpu | k24fps | kDisableVsync,
        kUseGpu | k30fps | kProxyWifi,
        kUseGpu | k30fps | kProxyBad,
        kUseGpu | k30fps | kSlowClock,
        kUseGpu | k30fps | kFastClock));