// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_log.h"

#include <string>

#include "base/base64.h"
#include "base/basictypes.h"
#include "base/memory/scoped_vector.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/sample_vector.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/testing_pref_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/proto/chrome_user_metrics_extension.pb.h"
#include "components/metrics/test_metrics_provider.h"
#include "components/metrics/test_metrics_service_client.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace metrics {

namespace {

const char kClientId[] = "bogus client ID";
const int64 kInstallDate = 1373051956;
// kInstallDate rounded down to the nearest hour.
const int64 kInstallDateExpected = 1373050800;
const int64 kEnabledDate = 1373001211;
// kEnabledDate rounded down to the nearest hour.
const int64 kEnabledDateExpected = 1373000400;
const int kSessionId = 127;
// Arbitrary {name hash, group hash} pairs; the tests below only check that
// these values round-trip through the system profile unchanged.
const variations::ActiveGroupId kFieldTrialIds[] = {
    {37, 43},
    {13, 47},
    {23, 17}
};
const variations::ActiveGroupId kSyntheticTrials[] = {
    {55, 15},
    {66, 16}
};

class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 LogType log_type,
                 MetricsServiceClient* client,
                 TestingPrefServiceSimple* prefs)
      : MetricsLog(client_id, session_id, log_type, client, prefs),
        prefs_(prefs) {
    InitPrefs();
  }

  ~TestMetricsLog() override {}

  // Expose the parent's protobuf accessors for inspection by the tests.
  const ChromeUserMetricsExtension& uma_proto() const {
    return *MetricsLog::uma_proto();
  }

  const SystemProfileProto& system_profile() const {
    return uma_proto().system_profile();
  }

 private:
  // Stores the enabled-date pref that MetricsLog reads when building the log.
  void InitPrefs() {
    prefs_->SetString(prefs::kMetricsReportingEnabledTimestamp,
                      base::Int64ToString(kEnabledDate));
  }

  // Overridden to return the fixed test trials instead of querying the
  // process-wide field trial state.
  void GetFieldTrialIds(
      std::vector<variations::ActiveGroupId>* field_trial_ids) const override {
    ASSERT_TRUE(field_trial_ids->empty());
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      field_trial_ids->push_back(kFieldTrialIds[i]);
    }
  }

  // Weak pointer to the PrefService used by this log.
  TestingPrefServiceSimple* prefs_;

  DISALLOW_COPY_AND_ASSIGN(TestMetricsLog);
};

}  // namespace

class MetricsLogTest : public testing::Test {
 public:
  MetricsLogTest() {
    MetricsLog::RegisterPrefs(prefs_.registry());
    MetricsStateManager::RegisterPrefs(prefs_.registry());
  }

  ~MetricsLogTest() override {}

 protected:
  // Check that the values in |system_profile| correspond to the test data
  // defined at the top of this file.
  void CheckSystemProfile(const SystemProfileProto& system_profile) {
    EXPECT_EQ(kInstallDateExpected, system_profile.install_date());
    EXPECT_EQ(kEnabledDateExpected, system_profile.uma_enabled_date());

    ASSERT_EQ(arraysize(kFieldTrialIds) + arraysize(kSyntheticTrials),
              static_cast<size_t>(system_profile.field_trial_size()));
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i);
      EXPECT_EQ(kFieldTrialIds[i].name, field_trial.name_id());
      EXPECT_EQ(kFieldTrialIds[i].group, field_trial.group_id());
    }

    // Verify the right data is present for the synthetic trials, which are
    // appended after the regular field trials.
    for (size_t i = 0; i < arraysize(kSyntheticTrials); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i + arraysize(kFieldTrialIds));
      EXPECT_EQ(kSyntheticTrials[i].name, field_trial.name_id());
      EXPECT_EQ(kSyntheticTrials[i].group, field_trial.group_id());
    }

    EXPECT_EQ(TestMetricsServiceClient::kBrandForTesting,
              system_profile.brand_code());

    const SystemProfileProto::Hardware& hardware = system_profile.hardware();
    EXPECT_TRUE(hardware.has_cpu());
    EXPECT_TRUE(hardware.cpu().has_vendor_name());
    EXPECT_TRUE(hardware.cpu().has_signature());
    EXPECT_TRUE(hardware.cpu().has_num_cores());

    // TODO(isherman): Verify other data written into the protobuf as a result
    // of this call.
  }

  TestingPrefServiceSimple prefs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MetricsLogTest);
};

TEST_F(MetricsLogTest, LogType) {
  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;

  MetricsLog log1("id", 0, MetricsLog::ONGOING_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::ONGOING_LOG, log1.log_type());

  MetricsLog log2("id", 0, MetricsLog::INITIAL_STABILITY_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::INITIAL_STABILITY_LOG, log2.log_type());
}

TEST_F(MetricsLogTest, EmptyRecord) {
  TestMetricsServiceClient client;
  client.set_version_string("bogus version");
  TestingPrefServiceSimple prefs;
  MetricsLog log("totally bogus client ID", 137, MetricsLog::ONGOING_LOG,
                 &client, &prefs);
  log.CloseLog();

  std::string encoded;
  log.GetEncodedLog(&encoded);

  // A couple of fields are hard to mock, so they are copied from the parsed
  // output directly into the expected output.
  ChromeUserMetricsExtension parsed;
  ASSERT_TRUE(parsed.ParseFromString(encoded));

  ChromeUserMetricsExtension expected;
  expected.set_client_id(5217101509553811875);  // Hash of the bogus client ID.
  expected.set_session_id(137);
  expected.mutable_system_profile()->set_build_timestamp(
      parsed.system_profile().build_timestamp());
  expected.mutable_system_profile()->set_app_version("bogus version");
  expected.mutable_system_profile()->set_channel(client.GetChannel());

  EXPECT_EQ(expected.SerializeAsString(), encoded);
}
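
// Note: the byte-for-byte comparison above relies on protobuf's serializer
// writing known fields in field-number order, so two messages built with the
// same field values produce identical bytes. If MetricsLog started populating
// a new field by default, this test would surface it as a mismatch.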

TEST_F(MetricsLogTest, HistogramBucketFields) {
  // Create buckets: 1-5, 5-7, 7-8, 8-9, 9-10, 10-11, 11-12.
  base::BucketRanges ranges(8);
  ranges.set_range(0, 1);
  ranges.set_range(1, 5);
  ranges.set_range(2, 7);
  ranges.set_range(3, 8);
  ranges.set_range(4, 9);
  ranges.set_range(5, 10);
  ranges.set_range(6, 11);
  ranges.set_range(7, 12);

  base::SampleVector samples(&ranges);
  samples.Accumulate(3, 1);   // Bucket 1-5.
  samples.Accumulate(6, 1);   // Bucket 5-7.
  samples.Accumulate(8, 1);   // Bucket 8-9. (7-8 skipped)
  samples.Accumulate(10, 1);  // Bucket 10-11. (9-10 skipped)
  samples.Accumulate(11, 1);  // Bucket 11-12.

  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  log.RecordHistogramDelta("Test", samples);

  const ChromeUserMetricsExtension& uma_proto = log.uma_proto();
  const HistogramEventProto& histogram_proto =
      uma_proto.histogram_event(uma_proto.histogram_event_size() - 1);

  // Buckets with samples: 1-5, 5-7, 8-9, 10-11, 11-12.
  // Should become: 1-/, 5-7, /-9, 10-/, /-12, where "/" marks an omitted
  // bound.
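  // Omitting a redundant bound is a size optimization in the encoded proto:
  // a bucket's max is dropped when it equals the next recorded bucket's min,
  // and its min is dropped when it equals max - 1, since the reader can
  // recompute either one. The assertions below pin down that behavior.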
  ASSERT_EQ(5, histogram_proto.bucket_size());

  // 1-5 becomes 1-/ (max is same as next min).
  EXPECT_TRUE(histogram_proto.bucket(0).has_min());
  EXPECT_FALSE(histogram_proto.bucket(0).has_max());
  EXPECT_EQ(1, histogram_proto.bucket(0).min());

  // 5-7 stays 5-7 (no optimization possible).
  EXPECT_TRUE(histogram_proto.bucket(1).has_min());
  EXPECT_TRUE(histogram_proto.bucket(1).has_max());
  EXPECT_EQ(5, histogram_proto.bucket(1).min());
  EXPECT_EQ(7, histogram_proto.bucket(1).max());

  // 8-9 becomes /-9 (min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(2).has_min());
  EXPECT_TRUE(histogram_proto.bucket(2).has_max());
  EXPECT_EQ(9, histogram_proto.bucket(2).max());

  // 10-11 becomes 10-/ (both optimizations apply; omitting max takes
  // priority).
  EXPECT_TRUE(histogram_proto.bucket(3).has_min());
  EXPECT_FALSE(histogram_proto.bucket(3).has_max());
  EXPECT_EQ(10, histogram_proto.bucket(3).min());

  // 11-12 becomes /-12 (the last record must keep its max; min is same as
  // max - 1).
  EXPECT_FALSE(histogram_proto.bucket(4).has_min());
  EXPECT_TRUE(histogram_proto.bucket(4).has_max());
  EXPECT_EQ(12, histogram_proto.bucket(4).max());
}

TEST_F(MetricsLogTest, RecordEnvironment) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);

  std::vector<variations::ActiveGroupId> synthetic_trials;
  // Add two synthetic trials.
  synthetic_trials.push_back(kSyntheticTrials[0]);
  synthetic_trials.push_back(kSyntheticTrials[1]);

  log.RecordEnvironment(std::vector<MetricsProvider*>(),
                        synthetic_trials,
                        kInstallDate, kEnabledDate);
  // Check that the system profile on the log has the correct values set.
  CheckSystemProfile(log.system_profile());

  // Check that the system profile has also been written to prefs.
  const std::string base64_system_profile =
      prefs_.GetString(prefs::kStabilitySavedSystemProfile);
  EXPECT_FALSE(base64_system_profile.empty());
  std::string serialized_system_profile;
  EXPECT_TRUE(base::Base64Decode(base64_system_profile,
                                 &serialized_system_profile));
  SystemProfileProto decoded_system_profile;
  EXPECT_TRUE(
      decoded_system_profile.ParseFromString(serialized_system_profile));
  CheckSystemProfile(decoded_system_profile);
}
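
// Persisting the profile (and a hash of it) under the kStabilitySaved* prefs
// lets a later session attach an environment to a log before it has recorded
// a new one; LoadSavedEnvironmentFromPrefs below exercises exactly that path.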

TEST_F(MetricsLogTest, LoadSavedEnvironmentFromPrefs) {
  const char* kSystemProfilePref = prefs::kStabilitySavedSystemProfile;
  const char* kSystemProfileHashPref =
      prefs::kStabilitySavedSystemProfileHash;

  TestMetricsServiceClient client;

  // The pref value is empty, so loading it from prefs should fail.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
  }

  // Do a RecordEnvironment() call and check whether the pref is recorded.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate, kEnabledDate);
    EXPECT_FALSE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_FALSE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Loading the saved environment should now succeed.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_TRUE(log.LoadSavedEnvironmentFromPrefs());
    // Check some values in the system profile.
    EXPECT_EQ(kInstallDateExpected, log.system_profile().install_date());
    EXPECT_EQ(kEnabledDateExpected, log.system_profile().uma_enabled_date());
    // Ensure that the call cleared the prefs.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Ensure that a non-matching hash results in the pref being invalid.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    // Call RecordEnvironment() to record the pref again.
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate, kEnabledDate);
  }

  {
    // Set the hash to a bad value.
    prefs_.SetString(kSystemProfileHashPref, "deadbeef");
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
    // Ensure that the prefs are cleared, even if the call failed.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }
}
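
// Together, the scoped blocks above cover the three outcomes of
// LoadSavedEnvironmentFromPrefs(): no saved profile, a saved profile with a
// matching hash, and a saved profile with a stale hash. In the two cases
// where a profile had been saved, the prefs end up cleared afterwards.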

TEST_F(MetricsLogTest, InitialLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(kClientId,
                     kSessionId,
                     MetricsLog::INITIAL_STABILITY_LOG,
                     &client,
                     &prefs_);
  TestMetricsProvider* test_provider = new TestMetricsProvider();
  ScopedVector<MetricsProvider> metrics_providers;
  metrics_providers.push_back(test_provider);
  log.RecordEnvironment(metrics_providers.get(),
                        std::vector<variations::ActiveGroupId>(), kInstallDate,
                        kEnabledDate);
  log.RecordStabilityMetrics(metrics_providers.get(), base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_TRUE(stability.has_incomplete_shutdown_count());
  EXPECT_TRUE(stability.has_breakpad_registration_success_count());
  EXPECT_TRUE(stability.has_breakpad_registration_failure_count());
  EXPECT_TRUE(stability.has_debugger_present_count());
  EXPECT_TRUE(stability.has_debugger_not_present_count());

  // The test provider should have been called upon to provide initial
  // stability and regular stability metrics.
  EXPECT_TRUE(test_provider->provide_initial_stability_metrics_called());
  EXPECT_TRUE(test_provider->provide_stability_metrics_called());
}
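
// The mirror image of the test above: the same recording sequence against an
// ONGOING_LOG, expecting the initial-log-only stability fields to be absent.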
TEST_F(MetricsLogTest, OngoingLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  TestMetricsProvider* test_provider = new TestMetricsProvider();
  ScopedVector<MetricsProvider> metrics_providers;
  metrics_providers.push_back(test_provider);
  log.RecordEnvironment(metrics_providers.get(),
                        std::vector<variations::ActiveGroupId>(), kInstallDate,
                        kEnabledDate);
  log.RecordStabilityMetrics(metrics_providers.get(), base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics (should not be present in an ongoing log):
  EXPECT_FALSE(stability.has_incomplete_shutdown_count());
  EXPECT_FALSE(stability.has_breakpad_registration_success_count());
  EXPECT_FALSE(stability.has_breakpad_registration_failure_count());
  EXPECT_FALSE(stability.has_debugger_present_count());
  EXPECT_FALSE(stability.has_debugger_not_present_count());

  // The test provider should have been called upon to provide regular but not
  // initial stability metrics.
  EXPECT_FALSE(test_provider->provide_initial_stability_metrics_called());
  EXPECT_TRUE(test_provider->provide_stability_metrics_called());
}

TEST_F(MetricsLogTest, ChromeChannelWrittenToProtobuf) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  EXPECT_TRUE(log.uma_proto().system_profile().has_channel());
}

TEST_F(MetricsLogTest, ProductNotSetIfDefault) {
  TestMetricsServiceClient client;
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, client.GetProduct());
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product isn't set, since it's the default, and also verify
  // that the default value is indeed equal to Chrome.
  EXPECT_FALSE(log.uma_proto().has_product());
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, log.uma_proto().product());
}

TEST_F(MetricsLogTest, ProductSetIfNotDefault) {
  const int32_t kTestProduct = 100;
  EXPECT_NE(ChromeUserMetricsExtension::CHROME, kTestProduct);

  TestMetricsServiceClient client;
  client.set_product(kTestProduct);
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product is set to |kTestProduct|.
  EXPECT_TRUE(log.uma_proto().has_product());
  EXPECT_EQ(kTestProduct, log.uma_proto().product());
}

}  // namespace metrics