// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/basictypes.h"
#include "base/command_line.h"
#include "base/strings/stringprintf.h"
#include "base/test/trace_event_analyzer.h"
#include "base/time/default_tick_clock.h"
#include "base/win/windows_version.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/extensions/tab_helper.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/exclusive_access/fullscreen_controller.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/test/base/test_launcher_utils.h"
#include "chrome/test/base/test_switches.h"
#include "chrome/test/base/tracing.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/feature_switch.h"
#include "extensions/common/features/base_feature_provider.h"
#include "extensions/common/features/complex_feature.h"
#include "extensions/common/features/feature.h"
#include "extensions/common/features/simple_feature.h"
#include "extensions/common/switches.h"
#include "extensions/test/extension_test_message_listener.h"
#include "media/base/audio_bus.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/barcode.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/test/utility/udp_proxy.h"
#include "net/base/ip_endpoint.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_server_socket.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"
const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";

// Skip a few events from the beginning.
static const size_t kSkipEvents = 3;
enum TestFlags {
  kUseGpu = 1 << 0,        // Only execute test if --enable-gpu was given
                           // on the command line. This is required for
                           // tests that run on GPU.
  kDisableVsync = 1 << 1,  // Do not limit framerate to vertical refresh
                           // when on GPU, nor to 60 Hz when not on GPU.
  kSmallWindow = 1 << 2,   // 1 = 800x600, 0 = 2000x1500
  k24fps = 1 << 3,         // Use 24 fps video.
  k30fps = 1 << 4,         // Use 30 fps video.
  k60fps = 1 << 5,         // Use 60 fps video.
  kProxyWifi = 1 << 6,     // Run UDP through UDPProxy wifi profile.
  kProxyBad = 1 << 7,      // Run UDP through UDPProxy bad profile.
  kSlowClock = 1 << 8,     // Receiver clock is 10 seconds slow.
  kFastClock = 1 << 9,     // Receiver clock is 10 seconds fast.
};
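// Test parameters combine these flags bitwise; for example, the parameter
// (kUseGpu | k30fps | kProxyWifi) in the INSTANTIATE_TEST_CASE_P list at the
// bottom of this file runs a GPU-accelerated 30 fps capture through the
// simulated wifi network profile.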
class SkewedTickClock : public base::DefaultTickClock {
 public:
  explicit SkewedTickClock(const base::TimeDelta& delta) : delta_(delta) {
  }
  base::TimeTicks NowTicks() override {
    return DefaultTickClock::NowTicks() + delta_;
  }

 private:
  base::TimeDelta delta_;
};
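// For example, SkewedTickClock(base::TimeDelta::FromSeconds(10)) reports
// times 10 seconds ahead of the real clock; this is how the kFastClock and
// kSlowClock receiver scenarios in RunTest() below are produced.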
class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment {
 public:
  explicit SkewedCastEnvironment(const base::TimeDelta& delta) :
      StandaloneCastEnvironment() {
    clock_.reset(new SkewedTickClock(delta));
  }

 protected:
  ~SkewedCastEnvironment() override {}
};
// We log one of these for each call to OnAudioFrame/OnVideoFrame.
struct TimeData {
  TimeData(uint16 frame_no_, base::TimeTicks render_time_) :
      frame_no(frame_no_),
      render_time(render_time_) {
  }
  // The unit here is video frames; for audio data there can be duplicates.
  // This was decoded from the actual audio/video data.
  uint16 frame_no;
  // This is when we should play this data, according to the sender.
  base::TimeTicks render_time;
};
// TODO(hubbe): Move to media/cast to use for offline log analysis.
struct MeanAndError {
  explicit MeanAndError(const std::vector<double>& values) {
    double sum = 0.0;
    double sqr_sum = 0.0;
    num_values = values.size();
    for (size_t i = 0; i < num_values; i++) {
      sum += values[i];
      sqr_sum += values[i] * values[i];
    }
    mean = sum / num_values;
    std_dev = sqrt(std::max(0.0, num_values * sqr_sum - sum * sum)) /
        num_values;
  }

  std::string AsString() const {
    return base::StringPrintf("%f,%f", mean, std_dev);
  }

  void Print(const std::string& measurement,
             const std::string& modifier,
             const std::string& trace,
             const std::string& unit) {
    if (num_values >= 20) {
      perf_test::PrintResultMeanAndError(measurement,
                                         modifier,
                                         trace,
                                         AsString(),
                                         unit,
                                         true);
    } else {
      LOG(ERROR) << "Not enough events for "
                 << measurement << modifier << " " << trace;
    }
  }

  size_t num_values;
  double mean;
  double std_dev;
};
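// Note: sqrt(max(0, n * sum(x^2) - sum(x)^2)) / n above is the population
// standard deviation of |values|; the max() with zero guards against tiny
// negative intermediates caused by floating-point rounding.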
// This function checks how smooth the data in |data| is.
// It computes the average delta and the average absolute deviation from
// that delta. If data[x] == x * A + B, then this function returns zero.
// The unit is milliseconds.
static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
  CHECK_GT(data.size(), 1UL);
  VLOG(0) << "Jitter analysis on " << data.size() << " values.";
  std::vector<double> deltas;
  double sum = 0.0;
  for (size_t i = 1; i < data.size(); i++) {
    double delta = (data[i].render_time -
                    data[i - 1].render_time).InMillisecondsF();
    deltas.push_back(delta);
    sum += delta;
  }
  double mean = sum / deltas.size();
  for (size_t i = 0; i < deltas.size(); i++) {
    deltas[i] = fabs(mean - deltas[i]);
  }

  return MeanAndError(deltas);
}
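// Worked example: render times at 0, 33, 66 and 100 ms give deltas of
// {33, 33, 34} ms, a mean delta of ~33.3 ms, and a reported jitter (mean
// absolute deviation from that mean) of ~0.44 ms.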
// An in-process Cast receiver that examines the audio/video frames being
// received and logs some data about each received audio/video frame.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          media::cast::GetDefaultAudioReceiverConfig(),
                          media::cast::GetDefaultVideoReceiverConfig()) {
  }

  typedef std::map<uint16, base::TimeTicks> TimeMap;

  // Build a map from frame ID (as encoded in the audio and video data)
  // to the render time for that frame. Note that there will be multiple
  // audio frames which all have the same frame ID. When that happens we
  // want the minimum render time, because that audio frame is supposed
  // to play at the same time that the corresponding image is presented.
  void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) {
    for (size_t i = kSkipEvents; i < events.size(); i++) {
      base::TimeTicks& frame_tick = (*map)[events[i].frame_no];
      if (frame_tick.is_null()) {
        frame_tick = events[i].render_time;
      } else {
        frame_tick = std::min(events[i].render_time, frame_tick);
      }
    }
  }
  void Analyze(const std::string& name, const std::string& modifier) {
    // First, find the minimum render time for each audio and video frame.
    // Note that the data encoded in the audio stream contains video frame
    // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then
    // 1/30s of "2", etc.
    TimeMap audio_frame_times, video_frame_times;
    MapFrameTimes(audio_events_, &audio_frame_times);
    MapFrameTimes(video_events_, &video_frame_times);
    std::vector<double> deltas;
    for (TimeMap::const_iterator i = audio_frame_times.begin();
         i != audio_frame_times.end();
         ++i) {
      TimeMap::const_iterator j = video_frame_times.find(i->first);
      if (j != video_frame_times.end()) {
        deltas.push_back((i->second - j->second).InMillisecondsF());
      }
    }

    // Close to zero is better (values can be negative).
    MeanAndError(deltas).Print(name, modifier, "av_sync", "ms");
    AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms");
    AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms");
  }
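  // Sign convention in Analyze(): each delta is (audio render time - video
  // render time), so a positive av_sync value means the audio for a frame is
  // scheduled later than the matching video frame.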
  // Invoked by InProcessReceiver for each received audio frame.
  void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    // Note: This is the number of the video frame that this audio belongs to.
    uint16 frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      audio_events_.push_back(TimeData(frame_no, playout_time));
    } else {
      VLOG(0) << "Failed to decode audio timestamp!";
    }
  }
  void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                    const base::TimeTicks& render_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    TRACE_EVENT_INSTANT1(
        "mirroring", "TestPatternReceiver::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "render_time", render_time.ToInternalValue());

    uint16 frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_events_.push_back(TimeData(frame_no, render_time));
    } else {
      VLOG(0) << "Failed to decode barcode!";
    }
  }

 private:
  std::vector<TimeData> audio_events_;
  std::vector<TimeData> video_events_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};
class CastV2PerformanceTest
    : public ExtensionApiTest,
      public testing::WithParamInterface<int> {
 public:
  CastV2PerformanceTest() {}

  bool HasFlag(TestFlags flag) const {
    return (GetParam() & flag) == flag;
  }

  bool IsGpuAvailable() const {
    return base::CommandLine::ForCurrentProcess()->HasSwitch("enable-gpu");
  }
  std::string GetSuffixForTestFlags() {
    std::string suffix;
    if (HasFlag(kUseGpu))
      suffix += "_gpu";
    if (HasFlag(kDisableVsync))
      suffix += "_novsync";
    if (HasFlag(kSmallWindow))
      suffix += "_small";
    if (HasFlag(kProxyWifi))
      suffix += "_wifi";
    if (HasFlag(kProxyBad))
      suffix += "_bad";
    if (HasFlag(kSlowClock))
      suffix += "_slow";
    if (HasFlag(kFastClock))
      suffix += "_fast";
    return suffix;
  }
  net::IPEndPoint GetFreeLocalPort() {
    // Determine an unused UDP port for the in-process receiver to listen on.
    // Method: Bind a UDP socket on port 0, and then check which port the
    // operating system assigned to it.
    net::IPAddressNumber localhost;
    localhost.push_back(127);
    localhost.push_back(0);
    localhost.push_back(0);
    localhost.push_back(1);
    scoped_ptr<net::UDPServerSocket> receive_socket(
        new net::UDPServerSocket(NULL, net::NetLog::Source()));
    receive_socket->AllowAddressReuse();
    CHECK_EQ(net::OK, receive_socket->Listen(net::IPEndPoint(localhost, 0)));
    net::IPEndPoint endpoint;
    CHECK_EQ(net::OK, receive_socket->GetLocalAddress(&endpoint));
    return endpoint;
  }
  void SetUp() override {
    ExtensionApiTest::SetUp();
  }
  void SetUpCommandLine(base::CommandLine* command_line) override {
    // Some of the tests may launch http requests through JSON or AJAX
    // which causes a security error (cross-domain request) when the page
    // is loaded from the local file system (file://). The following switch
    // fixes that error.
    command_line->AppendSwitch(switches::kAllowFileAccessFromFiles);

    if (HasFlag(kSmallWindow)) {
      command_line->AppendSwitchASCII(switches::kWindowSize, "800,600");
    } else {
      command_line->AppendSwitchASCII(switches::kWindowSize, "2000,1500");
    }

    if (!HasFlag(kUseGpu))
      command_line->AppendSwitch(switches::kDisableGpu);

    if (HasFlag(kDisableVsync))
      command_line->AppendSwitch(switches::kDisableGpuVsync);

    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        kExtensionId);
    ExtensionApiTest::SetUpCommandLine(command_line);
  }
  void GetTraceEvents(trace_analyzer::TraceAnalyzer* analyzer,
                      const std::string& event_name,
                      trace_analyzer::TraceEventVector* events) {
    trace_analyzer::Query query =
        trace_analyzer::Query::EventNameIs(event_name) &&
        (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE));
    analyzer->FindEvents(query, events);
  }
  // The key contains the argument's name and its numeric value.
  typedef std::pair<std::string, double> EventMapKey;
  typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap;
  // Make events findable by their arguments, for instance, if an
  // event has a "timestamp": 238724 argument, the map will contain
  // pair<"timestamp", 238724> -> &event. All arguments are indexed.
  void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer,
                   const std::string& event_name,
                   EventMap* event_map) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);
    for (size_t i = 0; i < events.size(); i++) {
      std::map<std::string, double>::const_iterator j;
      for (j = events[i]->arg_numbers.begin();
           j != events[i]->arg_numbers.end();
           ++j) {
        (*event_map)[*j] = events[i];
      }
    }
  }
  // Look up an event in |event_map|. The returned event will have the same
  // value for the argument |key_name| as the most recent event in
  // |prev_events| that carries that argument. Note that if |key_name| is
  // "time_delta", then we allow some fuzzy matching since the time deltas
  // are truncated to milliseconds in the code.
  const trace_analyzer::TraceEvent* FindNextEvent(
      const EventMap& event_map,
      std::vector<const trace_analyzer::TraceEvent*> prev_events,
      std::string key_name) {
    EventMapKey key;
    for (size_t i = prev_events.size(); i;) {
      --i;
      std::map<std::string, double>::const_iterator j =
          prev_events[i]->arg_numbers.find(key_name);
      if (j != prev_events[i]->arg_numbers.end()) {
        key = *j;
        break;
      }
    }
    EventMap::const_iterator i = event_map.lower_bound(key);
    if (i == event_map.end())
      return NULL;
    if (i->first.second == key.second)
      return i->second;
    if (key_name != "time_delta")
      return NULL;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    if (i == event_map.begin())
      return NULL;
    i--;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    return NULL;
  }
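  // The tolerance of 1000 above is in trace-event timestamp units
  // (microseconds), i.e. 1 ms, matching the millisecond truncation of
  // "time_delta" values mentioned in the comment on FindNextEvent().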
  // Given a vector of vectors of data, extract the difference between
  // two columns (|col_a| and |col_b|) and output the result as a
  // performance metric.
  void OutputMeasurement(const std::string& test_name,
                         const std::vector<std::vector<double> > data,
                         const std::string& measurement_name,
                         int col_a,
                         int col_b) {
    std::vector<double> tmp;
    for (size_t i = 0; i < data.size(); i++) {
      tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0);
    }
    return MeanAndError(tmp).Print(test_name,
                                   GetSuffixForTestFlags(),
                                   measurement_name,
                                   "ms");
  }
  // Analyzing latency is hard, because there is no unifying identifier for
  // frames throughout the code. At first, we have a capture timestamp, which
  // gets converted to a time delta, then back to a timestamp. Once it enters
  // the cast library it gets converted to an rtp_timestamp, and when it
  // leaves the cast library, all we have is the render_time.
  //
  // To be able to follow the frame throughout all this, we insert TRACE
  // calls that track each conversion as it happens. Then we extract all
  // these events and link them together.
  void AnalyzeLatency(const std::string& test_name,
                      trace_analyzer::TraceAnalyzer* analyzer) {
    EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done;
    IndexEvents(analyzer, "OnBufferReceived", &onbuffer);
    IndexEvents(analyzer, "MediaStreamVideoSink::OnVideoFrame", &sink);
    IndexEvents(analyzer, "InsertRawVideoFrame", &inserted);
    IndexEvents(analyzer, "VideoFrameEncoded", &encoded);
    IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted);
    IndexEvents(analyzer, "FrameDecoded", &decoded);
    IndexEvents(analyzer, "TestPatternReceiver::OnVideoFrame", &done);
    std::vector<std::pair<EventMap*, std::string> > event_maps;
    event_maps.push_back(std::make_pair(&onbuffer, "timestamp"));
    event_maps.push_back(std::make_pair(&sink, "time_delta"));
    event_maps.push_back(std::make_pair(&inserted, "timestamp"));
    event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&done, "render_time"));

    trace_analyzer::TraceEventVector capture_events;
    GetTraceEvents(analyzer, "Capture", &capture_events);
    EXPECT_GT(capture_events.size(), 0UL);
    std::vector<std::vector<double> > traced_frames;
    for (size_t i = kSkipEvents; i < capture_events.size(); i++) {
      std::vector<double> times;
      const trace_analyzer::TraceEvent* event = capture_events[i];
      times.push_back(event->timestamp);  // begin capture
      event = event->other_event;
      if (!event) {
        continue;
      }
      times.push_back(event->timestamp);  // end capture (with timestamp)
      std::vector<const trace_analyzer::TraceEvent*> prev_events;
      prev_events.push_back(event);
      for (size_t j = 0; j < event_maps.size(); j++) {
        event = FindNextEvent(*event_maps[j].first,
                              prev_events,
                              event_maps[j].second);
        if (!event) {
          break;
        }
        prev_events.push_back(event);
        times.push_back(event->timestamp);
      }
      if (times.size() == 9) {
        // Successfully traced frame from beginning to end.
        traced_frames.push_back(times);
      }
    }
    // Lower is better for all of these.
    OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8);
    OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1);
    OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3);
    OutputMeasurement(test_name, traced_frames, "encode", 3, 5);
    OutputMeasurement(test_name, traced_frames, "transmit", 5, 6);
    OutputMeasurement(test_name, traced_frames, "decode", 6, 7);
    OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8);
  }
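  // For reference, the columns of |traced_frames| used in AnalyzeLatency()
  // above are:
  //   0 = capture begin, 1 = capture end, 2 = OnBufferReceived,
  //   3 = MediaStreamVideoSink::OnVideoFrame, 4 = InsertRawVideoFrame,
  //   5 = VideoFrameEncoded, 6 = PullEncodedVideoFrame, 7 = FrameDecoded,
  //   8 = TestPatternReceiver::OnVideoFrame.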
  MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer,
                                    const std::string& event_name) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);

    std::vector<double> deltas;
    for (size_t i = kSkipEvents + 1; i < events.size(); ++i) {
      double delta_micros = events[i]->timestamp - events[i - 1]->timestamp;
      deltas.push_back(delta_micros / 1000.0);
    }
    return MeanAndError(deltas);
  }
  void RunTest(const std::string& test_name) {
    if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
      LOG(WARNING) <<
          "Test skipped: requires gpu. Pass --enable-gpu on the command "
          "line if use of GPU is desired.";
      return;
    }

    ASSERT_EQ(1,
              (HasFlag(k24fps) ? 1 : 0) +
              (HasFlag(k30fps) ? 1 : 0) +
              (HasFlag(k60fps) ? 1 : 0));

    net::IPEndPoint receiver_end_point = GetFreeLocalPort();
    // Start the in-process receiver that examines audio/video for the
    // expected test patterns.
    base::TimeDelta delta = base::TimeDelta::FromSeconds(0);
    if (HasFlag(kFastClock)) {
      delta = base::TimeDelta::FromSeconds(10);
    }
    if (HasFlag(kSlowClock)) {
      delta = base::TimeDelta::FromSeconds(-10);
    }
    scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
        new SkewedCastEnvironment(delta));
    TestPatternReceiver* const receiver =
        new TestPatternReceiver(cast_environment, receiver_end_point);
    scoped_ptr<media::cast::test::UDPProxy> udp_proxy;
    if (HasFlag(kProxyWifi) || HasFlag(kProxyBad)) {
      net::IPEndPoint proxy_end_point = GetFreeLocalPort();
      if (HasFlag(kProxyWifi)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::WifiNetwork().Pass(),
            media::cast::test::WifiNetwork().Pass(),
            NULL);
      } else if (HasFlag(kProxyBad)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::BadNetwork().Pass(),
            media::cast::test::BadNetwork().Pass(),
            NULL);
      }
      receiver_end_point = proxy_end_point;
    }
    std::string json_events;
    ASSERT_TRUE(tracing::BeginTracing(
        "test_fps,mirroring,gpu.capture,cast_perf_test"));
    const std::string page_url = base::StringPrintf(
        "performance%d.html?port=%d",
        HasFlag(k24fps) ? 24 : (HasFlag(k30fps) ? 30 : 60),
        receiver_end_point.port());
    ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
    ASSERT_TRUE(tracing::EndTracing(&json_events));

    // Stop all threads; this removes the need for synchronization when
    // analyzing the data.
    cast_environment->Shutdown();
    scoped_ptr<trace_analyzer::TraceAnalyzer> analyzer;
    analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events));
    analyzer->AssociateAsyncBeginEndEvents();
    MeanAndError frame_data = AnalyzeTraceDistance(
        analyzer.get(),
        "OnSwapCompositorFrame");

    EXPECT_GT(frame_data.num_values, 0UL);
    frame_data.Print(test_name,
                     GetSuffixForTestFlags(),
                     "time_between_frames",
                     "ms");

    // This prints out the average time between capture events.
    // As the capture frame rate is capped at 30fps, this score
    // cannot get any better (lower) than 33.33 ms.
    MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture");
    capture_data.Print(test_name,
                       GetSuffixForTestFlags(),
                       "time_between_captures",
                       "ms");

    receiver->Analyze(test_name, GetSuffixForTestFlags());

    AnalyzeLatency(test_name, analyzer.get());
  }
};
IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) {
  RunTest("CastV2Performance");
}
// Note: The first argument is optional and intentionally left blank
// (it's a prefix for the generated test cases).
INSTANTIATE_TEST_CASE_P(
    ,
    CastV2PerformanceTest,
    testing::Values(
        kUseGpu | k24fps | kDisableVsync,
        kUseGpu | k30fps | kProxyWifi,
        kUseGpu | k30fps | kProxyBad,
        kUseGpu | k30fps | kSlowClock,
        kUseGpu | k30fps | kFastClock));
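// To run these locally, the test binary is typically invoked with a gtest
// filter such as --gtest_filter=*CastV2PerformanceTest* (plus --enable-gpu
// for the kUseGpu variants); the exact invocation depends on the local
// build setup.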