// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>
#include <cmath>
#include <map>
#include <vector>

#include "base/basictypes.h"
#include "base/command_line.h"
#include "base/strings/stringprintf.h"
#include "base/test/trace_event_analyzer.h"
#include "base/time/default_tick_clock.h"
#include "base/win/windows_version.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/extensions/tab_helper.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/exclusive_access/fullscreen_controller.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/test/base/test_launcher_utils.h"
#include "chrome/test/base/test_switches.h"
#include "chrome/test/base/tracing.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/feature_switch.h"
#include "extensions/common/features/base_feature_provider.h"
#include "extensions/common/features/complex_feature.h"
#include "extensions/common/features/feature.h"
#include "extensions/common/features/simple_feature.h"
#include "extensions/common/switches.h"
#include "extensions/test/extension_test_message_listener.h"
#include "media/base/audio_bus.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/barcode.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "media/cast/test/utility/udp_proxy.h"
#include "net/base/ip_endpoint.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_server_socket.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/perf/perf_test.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"

const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";

// Skip a few events from the beginning.
static const size_t kSkipEvents = 3;
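// (Presumably this skips pipeline start-up, which would otherwise skew the
// statistics.)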

enum TestFlags {
  kUseGpu = 1 << 0,        // Only execute test if --enable-gpu was given
                           // on the command line. This is required for
                           // tests that run on GPU.
  kDisableVsync = 1 << 1,  // Do not limit framerate to vertical refresh
                           // when on GPU, nor to 60hz when not on GPU.
  kSmallWindow = 1 << 2,   // 1 = 800x600, 0 = 2000x1500
  k24fps = 1 << 3,         // Use 24 fps video.
  k30fps = 1 << 4,         // Use 30 fps video.
  k60fps = 1 << 5,         // Use 60 fps video.
  kProxyWifi = 1 << 6,     // Run UDP through UDPProxy wifi profile.
  kProxyBad = 1 << 7,      // Run UDP through UDPProxy bad profile.
  kSlowClock = 1 << 8,     // Receiver clock is 10 seconds slow.
  kFastClock = 1 << 9,     // Receiver clock is 10 seconds fast.
};
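
// A tick clock that returns the default clock's time shifted by a fixed
// |delta|; used below to simulate a receiver whose clock runs ahead of or
// behind the sender's (see kSlowClock/kFastClock).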
class SkewedTickClock : public base::DefaultTickClock {
 public:
  explicit SkewedTickClock(const base::TimeDelta& delta) : delta_(delta) {
  }
  base::TimeTicks NowTicks() override {
    return DefaultTickClock::NowTicks() + delta_;
  }

 private:
  base::TimeDelta delta_;
};

class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment {
 public:
  explicit SkewedCastEnvironment(const base::TimeDelta& delta) :
      StandaloneCastEnvironment() {
    clock_.reset(new SkewedTickClock(delta));
  }

 protected:
  ~SkewedCastEnvironment() override {}
};

// We log one of these for each call to OnAudioFrame/OnVideoFrame.
struct TimeData {
  TimeData(uint16 frame_no_, base::TimeTicks render_time_) :
      frame_no(frame_no_),
      render_time(render_time_) {
  }
  // The unit here is video frames; for audio data there can be duplicates.
  // This was decoded from the actual audio/video data.
  uint16 frame_no;
  // This is when we should play this data, according to the sender.
  base::TimeTicks render_time;
};

// TODO(hubbe): Move to media/cast to use for offline log analysis.
struct MeanAndError {
  explicit MeanAndError(const std::vector<double>& values) {
    double sum = 0.0;
    double sqr_sum = 0.0;
    num_values = values.size();
    for (size_t i = 0; i < num_values; i++) {
      sum += values[i];
      sqr_sum += values[i] * values[i];
    }
    mean = sum / num_values;
    std_dev = sqrt(std::max(0.0, num_values * sqr_sum - sum * sum)) /
        num_values;
  }
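  // Note: this is the population standard deviation, since
  //   sqrt(n * sum(x^2) - (sum x)^2) / n == sqrt(mean(x^2) - mean(x)^2).
  // Quick check: for values {1, 3}, sqrt(2 * 10 - 16) / 2 == 1.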

  std::string AsString() const {
    return base::StringPrintf("%f,%f", mean, std_dev);
  }

  void Print(const std::string& measurement,
             const std::string& modifier,
             const std::string& trace,
             const std::string& unit) {
    if (num_values >= 20) {
      perf_test::PrintResultMeanAndError(measurement,
                                         modifier,
                                         trace,
                                         AsString(),
                                         unit,
                                         true);
    } else {
      LOG(ERROR) << "Not enough events for "
                 << measurement << modifier << " " << trace;
    }
  }

  size_t num_values;
  double mean;
  double std_dev;
};

// This function checks how smooth the data in |data| is.
// It computes the deltas between consecutive render times and reports the
// mean absolute deviation of those deltas.
// If data[x] == x * A + B, then this function returns zero.
// The unit is milliseconds.
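// Example: render times of 0, 10, 20 and 31 ms give deltas {10, 10, 11};
// the mean delta is 10.33 ms, the absolute deviations are {0.33, 0.33, 0.67},
// and the reported jitter (their mean) is roughly 0.44 ms.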
static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
  CHECK_GT(data.size(), 1UL);
  VLOG(0) << "Jitter analysis on " << data.size() << " values.";
  std::vector<double> deltas;
  double sum = 0.0;
  for (size_t i = 1; i < data.size(); i++) {
    double delta = (data[i].render_time -
                    data[i - 1].render_time).InMillisecondsF();
    deltas.push_back(delta);
    sum += delta;
  }
  double mean = sum / deltas.size();
  for (size_t i = 0; i < deltas.size(); i++) {
    deltas[i] = fabs(mean - deltas[i]);
  }

  return MeanAndError(deltas);
}

// An in-process Cast receiver that examines the audio/video frames being
// received and logs some data about each received audio/video frame.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          net::IPEndPoint(),
                          media::cast::GetDefaultAudioReceiverConfig(),
                          media::cast::GetDefaultVideoReceiverConfig()) {
  }

  typedef std::map<uint16, base::TimeTicks> TimeMap;

  // Build a map from frame ID (as encoded in the audio and video data)
  // to the earliest render time for that frame. Note that there will be
  // multiple audio frames which all have the same frame ID. When that happens,
  // we want the minimum render time, because that audio frame is supposed
  // to play at the same time that the corresponding image is presented.
  void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) {
    for (size_t i = kSkipEvents; i < events.size(); i++) {
      base::TimeTicks& frame_tick = (*map)[events[i].frame_no];
      if (frame_tick.is_null()) {
        frame_tick = events[i].render_time;
      } else {
        frame_tick = std::min(events[i].render_time, frame_tick);
      }
    }
  }

  void Analyze(const std::string& name, const std::string& modifier) {
    // First, find the minimum render time for each audio and video frame.
    // Note that the data encoded in the audio stream contains video frame
    // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then
    // 1/30s of "2", etc.
    TimeMap audio_frame_times, video_frame_times;
    MapFrameTimes(audio_events_, &audio_frame_times);
    MapFrameTimes(video_events_, &video_frame_times);
    std::vector<double> deltas;
    for (TimeMap::const_iterator i = audio_frame_times.begin();
         i != audio_frame_times.end();
         ++i) {
      TimeMap::const_iterator j = video_frame_times.find(i->first);
      if (j != video_frame_times.end()) {
        deltas.push_back((i->second - j->second).InMillisecondsF());
      }
    }

    // Close to zero is better (the delta can be negative).
    MeanAndError(deltas).Print(name, modifier, "av_sync", "ms");
    AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms");
    AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms");
  }

  // Invoked by InProcessReceiver for each received audio frame.
  void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                    const base::TimeTicks& playout_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    // Note: This is the number of the video frame that this audio belongs to.
    uint16 frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      audio_events_.push_back(TimeData(frame_no, playout_time));
    } else {
      VLOG(0) << "Failed to decode audio timestamp!";
    }
  }

  void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                    const base::TimeTicks& render_time,
                    bool is_continuous) override {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    TRACE_EVENT_INSTANT1(
        "mirroring", "TestPatternReceiver::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "render_time", render_time.ToInternalValue());

    uint16 frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_events_.push_back(TimeData(frame_no, render_time));
    } else {
      VLOG(0) << "Failed to decode barcode!";
    }
  }

  std::vector<TimeData> audio_events_;
  std::vector<TimeData> video_events_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};

class CastV2PerformanceTest
    : public ExtensionApiTest,
      public testing::WithParamInterface<int> {
 public:
  CastV2PerformanceTest() {}

  bool HasFlag(TestFlags flag) const {
    return (GetParam() & flag) == flag;
  }

  bool IsGpuAvailable() const {
    return base::CommandLine::ForCurrentProcess()->HasSwitch("enable-gpu");
  }
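
  // The string built here is used as the "modifier" for the perf results
  // below, so each flag combination is reported as its own series.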
  std::string GetSuffixForTestFlags() {
    if (HasFlag(kUseGpu))
    if (HasFlag(kDisableVsync))
      suffix += "_novsync";
    if (HasFlag(kSmallWindow))
    if (HasFlag(kProxyWifi))
    if (HasFlag(kProxyBad))
    if (HasFlag(kSlowClock))
    if (HasFlag(kFastClock))

  net::IPEndPoint GetFreeLocalPort() {
    // Determine an unused UDP port for the in-process receiver to listen on.
    // Method: Bind a UDP socket on port 0, and then check which port the
    // operating system assigned to it.
    net::IPAddressNumber localhost;
    localhost.push_back(127);
    localhost.push_back(0);
    localhost.push_back(0);
    localhost.push_back(1);
    scoped_ptr<net::UDPServerSocket> receive_socket(
        new net::UDPServerSocket(NULL, net::NetLog::Source()));
    receive_socket->AllowAddressReuse();
    CHECK_EQ(net::OK, receive_socket->Listen(net::IPEndPoint(localhost, 0)));
    net::IPEndPoint endpoint;
    CHECK_EQ(net::OK, receive_socket->GetLocalAddress(&endpoint));
    return endpoint;
  }

  void SetUp() override {
    ExtensionApiTest::SetUp();
  }

  void SetUpCommandLine(base::CommandLine* command_line) override {
    // Some of the tests may launch http requests through JSON or AJAX
    // which causes a security error (cross-domain request) when the page
    // is loaded from the local file system ( file:// ). The following switch
    // fixes that error.
    command_line->AppendSwitch(switches::kAllowFileAccessFromFiles);

    if (HasFlag(kSmallWindow)) {
      command_line->AppendSwitchASCII(switches::kWindowSize, "800,600");
    } else {
      command_line->AppendSwitchASCII(switches::kWindowSize, "2000,1500");
    }

    if (!HasFlag(kUseGpu))
      command_line->AppendSwitch(switches::kDisableGpu);

    if (HasFlag(kDisableVsync))
      command_line->AppendSwitch(switches::kDisableGpuVsync);

    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        kExtensionId);
    ExtensionApiTest::SetUpCommandLine(command_line);
  }
  void GetTraceEvents(trace_analyzer::TraceAnalyzer* analyzer,
                      const std::string& event_name,
                      trace_analyzer::TraceEventVector* events) {
    trace_analyzer::Query query =
        trace_analyzer::Query::EventNameIs(event_name) &&
        (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE));
    analyzer->FindEvents(query, events);
  }

  // The key contains the name of the argument and its value.
  typedef std::pair<std::string, double> EventMapKey;
  typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap;

  // Make events findable by their arguments. For instance, if an
  // event has a "timestamp": 238724 argument, the map will contain
  // pair<"timestamp", 238724> -> &event. All arguments are indexed.
  void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer,
                   const std::string& event_name,
                   EventMap* event_map) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);
    for (size_t i = 0; i < events.size(); i++) {
      std::map<std::string, double>::const_iterator j;
      for (j = events[i]->arg_numbers.begin();
           j != events[i]->arg_numbers.end();
           ++j) {
        (*event_map)[*j] = events[i];
      }
    }
  }

  // Look up an event in |event_map|. The returned event will have the same
  // value for the argument |key_name| as the most recent event in
  // |prev_events| that has that argument. Note that if |key_name| is
  // "time_delta", then we allow some fuzzy logic since the time deltas are
  // truncated to milliseconds in the code.
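  // (The compared values are raw trace argument values in microseconds, so
  // the tolerance of 1000 below presumably allows up to 1 ms of truncation
  // error.)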
  const trace_analyzer::TraceEvent* FindNextEvent(
      const EventMap& event_map,
      std::vector<const trace_analyzer::TraceEvent*> prev_events,
      std::string key_name) {
    EventMapKey key;
    for (size_t i = prev_events.size(); i;) {
      --i;
      std::map<std::string, double>::const_iterator j =
          prev_events[i]->arg_numbers.find(key_name);
      if (j != prev_events[i]->arg_numbers.end()) {
        key = *j;
        break;
      }
    }
    EventMap::const_iterator i = event_map.lower_bound(key);
    if (i == event_map.end())
      return NULL;
    if (i->first.second == key.second)
      return i->second;
    if (key_name != "time_delta")
      return NULL;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    if (i == event_map.begin())
      return NULL;
    --i;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    return NULL;
  }

  // Given a vector of vectors of data, extract the difference between
  // two columns (|col_a| and |col_b|) and output the result as a
  // performance metric.
  void OutputMeasurement(const std::string& test_name,
                         const std::vector<std::vector<double> > data,
                         const std::string& measurement_name,
                         int col_a,
                         int col_b) {
    std::vector<double> tmp;
    for (size_t i = 0; i < data.size(); i++) {
      tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0);
    }
    return MeanAndError(tmp).Print(test_name,
                                   GetSuffixForTestFlags(),
                                   measurement_name,
                                   "ms");
  }

  // Analyzing latency is hard, because there is no unifying identifier for
  // frames throughout the code. At first, we have a capture timestamp, which
  // gets converted to a time delta, then back to a timestamp. Once it enters
  // the cast library it gets converted to an rtp_timestamp, and when it leaves
  // the cast library, all we have is the render_time.
  //
  // To be able to follow the frame throughout all this, we insert TRACE
  // calls that track each conversion as it happens. Then we extract all
  // these events and link them together.
  void AnalyzeLatency(const std::string& test_name,
                      trace_analyzer::TraceAnalyzer* analyzer) {
    EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done;
    IndexEvents(analyzer, "OnBufferReceived", &onbuffer);
    IndexEvents(analyzer, "MediaStreamVideoSink::OnVideoFrame", &sink);
    IndexEvents(analyzer, "InsertRawVideoFrame", &inserted);
    IndexEvents(analyzer, "VideoFrameEncoded", &encoded);
    IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted);
    IndexEvents(analyzer, "FrameDecoded", &decoded);
    IndexEvents(analyzer, "TestPatternReceiver::OnVideoFrame", &done);
    std::vector<std::pair<EventMap*, std::string> > event_maps;
    event_maps.push_back(std::make_pair(&onbuffer, "timestamp"));
    event_maps.push_back(std::make_pair(&sink, "time_delta"));
    event_maps.push_back(std::make_pair(&inserted, "timestamp"));
    event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&done, "render_time"));
    trace_analyzer::TraceEventVector capture_events;
    GetTraceEvents(analyzer, "Capture", &capture_events);
    EXPECT_GT(capture_events.size(), 0UL);
    std::vector<std::vector<double> > traced_frames;
    for (size_t i = kSkipEvents; i < capture_events.size(); i++) {
      std::vector<double> times;
      const trace_analyzer::TraceEvent* event = capture_events[i];
      times.push_back(event->timestamp);  // begin capture
      event = event->other_event;
      if (!event) {
        continue;
      }
      times.push_back(event->timestamp);  // end capture (with timestamp)
      std::vector<const trace_analyzer::TraceEvent*> prev_events;
      prev_events.push_back(event);
      for (size_t j = 0; j < event_maps.size(); j++) {
        event = FindNextEvent(*event_maps[j].first,
                              prev_events,
                              event_maps[j].second);
        if (!event) {
          break;
        }
        prev_events.push_back(event);
        times.push_back(event->timestamp);
      }
      if (event) {
        // Successfully traced frame from beginning to end.
        traced_frames.push_back(times);
      }
    }
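
    // Each row of |traced_frames| holds, in order: begin capture (0),
    // end capture (1), OnBufferReceived (2), MediaStreamVideoSink (3),
    // InsertRawVideoFrame (4), VideoFrameEncoded (5), PullEncodedVideoFrame
    // (6), FrameDecoded (7), TestPatternReceiver::OnVideoFrame (8).
    // The column indices below refer to these positions.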
    // Lower is better for all of these.
    OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8);
    OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1);
    OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3);
    OutputMeasurement(test_name, traced_frames, "encode", 3, 5);
    OutputMeasurement(test_name, traced_frames, "transmit", 5, 6);
    OutputMeasurement(test_name, traced_frames, "decode", 6, 7);
    OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8);
  }

  MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer,
                                    const std::string& event_name) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);

    std::vector<double> deltas;
    for (size_t i = kSkipEvents + 1; i < events.size(); ++i) {
      double delta_micros = events[i]->timestamp - events[i - 1]->timestamp;
      deltas.push_back(delta_micros / 1000.0);
    }
    return MeanAndError(deltas);
  }

  void RunTest(const std::string& test_name) {
    if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
      LOG(WARNING) <<
          "Test skipped: requires gpu. Pass --enable-gpu on the command "
          "line if use of GPU is desired.";
      return;
    }

    CHECK_EQ(1,
             (HasFlag(k24fps) ? 1 : 0) +
             (HasFlag(k30fps) ? 1 : 0) +
             (HasFlag(k60fps) ? 1 : 0));

    net::IPEndPoint receiver_end_point = GetFreeLocalPort();

    // Start the in-process receiver that examines audio/video for the expected
    // test pattern.
    base::TimeDelta delta = base::TimeDelta::FromSeconds(0);
    if (HasFlag(kFastClock)) {
      delta = base::TimeDelta::FromSeconds(10);
    }
    if (HasFlag(kSlowClock)) {
      delta = base::TimeDelta::FromSeconds(-10);
    }
    scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
        new SkewedCastEnvironment(delta));
    TestPatternReceiver* const receiver =
        new TestPatternReceiver(cast_environment, receiver_end_point);

    scoped_ptr<media::cast::test::UDPProxy> udp_proxy;
    if (HasFlag(kProxyWifi) || HasFlag(kProxyBad)) {
      net::IPEndPoint proxy_end_point = GetFreeLocalPort();
      if (HasFlag(kProxyWifi)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::WifiNetwork().Pass(),
            media::cast::test::WifiNetwork().Pass(),
            NULL);
      } else if (HasFlag(kProxyBad)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::BadNetwork().Pass(),
            media::cast::test::BadNetwork().Pass(),
            NULL);
      }
      receiver_end_point = proxy_end_point;
    }

    std::string json_events;
    ASSERT_TRUE(tracing::BeginTracing(
        "test_fps,mirroring,gpu.capture,cast_perf_test"));
    const std::string page_url = base::StringPrintf(
        "performance%d.html?port=%d",
        receiver_end_point.port());
    ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
    ASSERT_TRUE(tracing::EndTracing(&json_events));

    // Stop all threads; this removes the need for synchronization when
    // analyzing the data.
    cast_environment->Shutdown();
    scoped_ptr<trace_analyzer::TraceAnalyzer> analyzer;
    analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events));
    analyzer->AssociateAsyncBeginEndEvents();

    MeanAndError frame_data = AnalyzeTraceDistance(
        analyzer.get(),
        "OnSwapCompositorFrame");

    EXPECT_GT(frame_data.num_values, 0UL);
    frame_data.Print(test_name,
                     GetSuffixForTestFlags(),
                     "time_between_frames",
                     "ms");

    // This prints out the average time between capture events.
    // As the capture frame rate is capped at 30fps, this score
    // cannot get any better (i.e. lower) than 33.33 ms.
    MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture");
    capture_data.Print(test_name,
                       GetSuffixForTestFlags(),
                       "time_between_captures",
                       "ms");

    receiver->Analyze(test_name, GetSuffixForTestFlags());

    AnalyzeLatency(test_name, analyzer.get());
  }
};

IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) {
  RunTest("CastV2Performance");
}

// Note: First argument is optional and intentionally left blank.
// (It's a prefix for the generated test cases.)
INSTANTIATE_TEST_CASE_P(
    ,
    CastV2PerformanceTest,
    testing::Values(
        kUseGpu | k24fps | kDisableVsync,
        kUseGpu | k30fps | kProxyWifi,
        kUseGpu | k30fps | kProxyBad,
        kUseGpu | k30fps | kSlowClock,
        kUseGpu | k30fps | kFastClock));