// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_log.h"

#include <string>
#include <vector>

#include "base/base64.h"
#include "base/basictypes.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/sample_vector.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/testing_pref_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/proto/chrome_user_metrics_extension.pb.h"
#include "components/metrics/test_metrics_service_client.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace metrics {

namespace {

const char kClientId[] = "bogus client ID";
const int64 kInstallDate = 1373051956;
const int64 kInstallDateExpected = 1373050800;  // Computed from kInstallDate.
const int64 kEnabledDate = 1373001211;
const int64 kEnabledDateExpected = 1373000400;  // Computed from kEnabledDate.
const int kSessionId = 127;
// Arbitrary name/group hash pairs; the exact values only need to round-trip
// through the log.
const variations::ActiveGroupId kFieldTrialIds[] = {
  {37, 43},
  {13, 47},
  {23, 17}
};
const variations::ActiveGroupId kSyntheticTrials[] = {
  {55, 15},
  {66, 16}
};

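// MetricsLog subclass that exposes the otherwise-protected proto accessors
// and substitutes the fixed field trial list above for the real one.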
class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 LogType log_type,
                 MetricsServiceClient* client,
                 TestingPrefServiceSimple* prefs)
      : MetricsLog(client_id, session_id, log_type, client, prefs),
        prefs_(prefs) {
    InitPrefs();
  }

  ~TestMetricsLog() override {}

  const ChromeUserMetricsExtension& uma_proto() const {
    return *MetricsLog::uma_proto();
  }

  const SystemProfileProto& system_profile() const {
    return uma_proto().system_profile();
  }

 private:
  void InitPrefs() {
    prefs_->SetString(prefs::kMetricsReportingEnabledTimestamp,
                      base::Int64ToString(kEnabledDate));
  }

  void GetFieldTrialIds(
      std::vector<variations::ActiveGroupId>* field_trial_ids) const override {
    ASSERT_TRUE(field_trial_ids->empty());

    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      field_trial_ids->push_back(kFieldTrialIds[i]);
    }
  }

  // Weak pointer to the PrefService used by this log.
  TestingPrefServiceSimple* prefs_;

  DISALLOW_COPY_AND_ASSIGN(TestMetricsLog);
};

}  // namespace

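// Test fixture that registers the prefs expected by MetricsLog and
// MetricsStateManager on a TestingPrefServiceSimple shared by the tests.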
class MetricsLogTest : public testing::Test {
 public:
  MetricsLogTest() {
    MetricsLog::RegisterPrefs(prefs_.registry());
    MetricsStateManager::RegisterPrefs(prefs_.registry());
  }

  ~MetricsLogTest() override {}

 protected:
  // Check that the values in |system_profile| correspond to the test data
  // defined at the top of this file.
  void CheckSystemProfile(const SystemProfileProto& system_profile) {
    EXPECT_EQ(kInstallDateExpected, system_profile.install_date());
    EXPECT_EQ(kEnabledDateExpected, system_profile.uma_enabled_date());

    ASSERT_EQ(arraysize(kFieldTrialIds) + arraysize(kSyntheticTrials),
              static_cast<size_t>(system_profile.field_trial_size()));
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i);
      EXPECT_EQ(kFieldTrialIds[i].name, field_trial.name_id());
      EXPECT_EQ(kFieldTrialIds[i].group, field_trial.group_id());
    }
    // Verify the right data is present for the synthetic trials.
    for (size_t i = 0; i < arraysize(kSyntheticTrials); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i + arraysize(kFieldTrialIds));
      EXPECT_EQ(kSyntheticTrials[i].name, field_trial.name_id());
      EXPECT_EQ(kSyntheticTrials[i].group, field_trial.group_id());
    }

    EXPECT_EQ(TestMetricsServiceClient::kBrandForTesting,
              system_profile.brand_code());

    const SystemProfileProto::Hardware& hardware = system_profile.hardware();

    EXPECT_TRUE(hardware.has_cpu());
    EXPECT_TRUE(hardware.cpu().has_vendor_name());
    EXPECT_TRUE(hardware.cpu().has_signature());
    EXPECT_TRUE(hardware.cpu().has_num_cores());

    // TODO(isherman): Verify other data written into the protobuf as a result
    // of this call.
  }

  TestingPrefServiceSimple prefs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MetricsLogTest);
};

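// Verifies that a log reports the type it was constructed with.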
TEST_F(MetricsLogTest, LogType) {
  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;

  MetricsLog log1("id", 0, MetricsLog::ONGOING_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::ONGOING_LOG, log1.log_type());

  MetricsLog log2("id", 0, MetricsLog::INITIAL_STABILITY_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::INITIAL_STABILITY_LOG, log2.log_type());
}

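// Serializes a log with no recorded data and compares it field by field
// against a minimal expected proto.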
TEST_F(MetricsLogTest, EmptyRecord) {
  TestMetricsServiceClient client;
  client.set_version_string("bogus version");
  TestingPrefServiceSimple prefs;
  MetricsLog log("totally bogus client ID", 137, MetricsLog::ONGOING_LOG,
                 &client, &prefs);
  log.CloseLog();

  std::string encoded;
  log.GetEncodedLog(&encoded);

  // A couple of fields are hard to mock, so they are copied over directly
  // from the parsed output into the expected proto.
  ChromeUserMetricsExtension parsed;
  ASSERT_TRUE(parsed.ParseFromString(encoded));

  ChromeUserMetricsExtension expected;
  expected.set_client_id(5217101509553811875);  // Hash of the bogus client ID.
  expected.set_session_id(137);
  expected.mutable_system_profile()->set_build_timestamp(
      parsed.system_profile().build_timestamp());
  expected.mutable_system_profile()->set_app_version("bogus version");
  expected.mutable_system_profile()->set_channel(client.GetChannel());

  EXPECT_EQ(expected.SerializeAsString(), encoded);
}

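// Verifies the bucket encoding optimization: when histogram samples are
// serialized, a bucket's max is omitted if it equals the next bucket's min,
// and its min is omitted if the bucket covers the single value max - 1,
// keeping the uploaded proto small.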
TEST_F(MetricsLogTest, HistogramBucketFields) {
  // Create buckets: 1-5, 5-7, 7-8, 8-9, 9-10, 10-11, 11-12.
  base::BucketRanges ranges(8);
  ranges.set_range(0, 1);
  ranges.set_range(1, 5);
  ranges.set_range(2, 7);
  ranges.set_range(3, 8);
  ranges.set_range(4, 9);
  ranges.set_range(5, 10);
  ranges.set_range(6, 11);
  ranges.set_range(7, 12);

  base::SampleVector samples(&ranges);
  samples.Accumulate(3, 1);   // Bucket 1-5.
  samples.Accumulate(6, 1);   // Bucket 5-7.
  samples.Accumulate(8, 1);   // Bucket 8-9. (7-8 skipped.)
  samples.Accumulate(10, 1);  // Bucket 10-11. (9-10 skipped.)
  samples.Accumulate(11, 1);  // Bucket 11-12.

  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  log.RecordHistogramDelta("Test", samples);

  const ChromeUserMetricsExtension& uma_proto = log.uma_proto();
  const HistogramEventProto& histogram_proto =
      uma_proto.histogram_event(uma_proto.histogram_event_size() - 1);

  // Buckets with samples: 1-5, 5-7, 8-9, 10-11, 11-12.
  // Should become: 1-/, 5-7, /-9, 10-/, /-12.
  ASSERT_EQ(5, histogram_proto.bucket_size());

  // 1-5 becomes 1-/ (max is the same as the next bucket's min).
  EXPECT_TRUE(histogram_proto.bucket(0).has_min());
  EXPECT_FALSE(histogram_proto.bucket(0).has_max());
  EXPECT_EQ(1, histogram_proto.bucket(0).min());

  // 5-7 stays 5-7 (no optimization possible).
  EXPECT_TRUE(histogram_proto.bucket(1).has_min());
  EXPECT_TRUE(histogram_proto.bucket(1).has_max());
  EXPECT_EQ(5, histogram_proto.bucket(1).min());
  EXPECT_EQ(7, histogram_proto.bucket(1).max());

  // 8-9 becomes /-9 (min is the same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(2).has_min());
  EXPECT_TRUE(histogram_proto.bucket(2).has_max());
  EXPECT_EQ(9, histogram_proto.bucket(2).max());

  // 10-11 becomes 10-/ (both optimizations apply; omitting the max takes
  // priority).
  EXPECT_TRUE(histogram_proto.bucket(3).has_min());
  EXPECT_FALSE(histogram_proto.bucket(3).has_max());
  EXPECT_EQ(10, histogram_proto.bucket(3).min());

  // 11-12 becomes /-12 (the last bucket must keep its max; min is the same
  // as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(4).has_min());
  EXPECT_TRUE(histogram_proto.bucket(4).has_max());
  EXPECT_EQ(12, histogram_proto.bucket(4).max());
}

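// Verifies that RecordEnvironment() populates the system profile and also
// persists a copy of it to local prefs.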
TEST_F(MetricsLogTest, RecordEnvironment) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);

  std::vector<variations::ActiveGroupId> synthetic_trials;
  // Add two synthetic trials.
  synthetic_trials.push_back(kSyntheticTrials[0]);
  synthetic_trials.push_back(kSyntheticTrials[1]);

  log.RecordEnvironment(std::vector<MetricsProvider*>(),
                        synthetic_trials,
                        kInstallDate, kEnabledDate);
  // Check that the system profile on the log has the correct values set.
  CheckSystemProfile(log.system_profile());

  // Check that the system profile has also been written to prefs.
  const std::string base64_system_profile =
      prefs_.GetString(prefs::kStabilitySavedSystemProfile);
  EXPECT_FALSE(base64_system_profile.empty());
  std::string serialized_system_profile;
  EXPECT_TRUE(base::Base64Decode(base64_system_profile,
                                 &serialized_system_profile));
  SystemProfileProto decoded_system_profile;
  EXPECT_TRUE(
      decoded_system_profile.ParseFromString(serialized_system_profile));
  CheckSystemProfile(decoded_system_profile);
}

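// Verifies that a system profile saved to prefs can be loaded back into a
// log, that loading clears the prefs, and that a non-matching hash is
// rejected.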
TEST_F(MetricsLogTest, LoadSavedEnvironmentFromPrefs) {
  const char* kSystemProfilePref = prefs::kStabilitySavedSystemProfile;
  const char* kSystemProfileHashPref =
      prefs::kStabilitySavedSystemProfileHash;

  TestMetricsServiceClient client;

  // The pref value is empty, so loading it from prefs should fail.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
  }

  // Do a RecordEnvironment() call and check whether the pref is recorded.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate, kEnabledDate);
    EXPECT_FALSE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_FALSE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_TRUE(log.LoadSavedEnvironmentFromPrefs());
    // Check some values in the system profile.
    EXPECT_EQ(kInstallDateExpected, log.system_profile().install_date());
    EXPECT_EQ(kEnabledDateExpected, log.system_profile().uma_enabled_date());
    // Ensure that the call cleared the prefs.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Ensure that a non-matching hash results in the pref being invalid.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    // Call RecordEnvironment() to record the pref again.
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate, kEnabledDate);
  }

  {
    // Set the hash to a bad value.
    prefs_.SetString(kSystemProfileHashPref, "deadbeef");
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
    // Ensure that the prefs are cleared, even if the call failed.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }
}

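// Verifies that an initial stability log includes the stability counts that
// are only reported in initial logs.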
TEST_F(MetricsLogTest, InitialLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(kClientId,
                     kSessionId,
                     MetricsLog::INITIAL_STABILITY_LOG,
                     &client,
                     &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate, kEnabledDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_TRUE(stability.has_incomplete_shutdown_count());
  EXPECT_TRUE(stability.has_breakpad_registration_success_count());
  EXPECT_TRUE(stability.has_breakpad_registration_failure_count());
  EXPECT_TRUE(stability.has_debugger_present_count());
  EXPECT_TRUE(stability.has_debugger_not_present_count());
}

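// Verifies that an ongoing log omits the initial-log-only stability counts.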
TEST_F(MetricsLogTest, OngoingLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate, kEnabledDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics (should be absent from an ongoing log):
  EXPECT_FALSE(stability.has_incomplete_shutdown_count());
  EXPECT_FALSE(stability.has_breakpad_registration_success_count());
  EXPECT_FALSE(stability.has_breakpad_registration_failure_count());
  EXPECT_FALSE(stability.has_debugger_present_count());
  EXPECT_FALSE(stability.has_debugger_not_present_count());
}

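// Verifies that the client's release channel is recorded in the system
// profile.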
TEST_F(MetricsLogTest, ChromeChannelWrittenToProtobuf) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  EXPECT_TRUE(log.uma_proto().system_profile().has_channel());
}

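// Verifies that the product field is left unset when the client reports the
// default product (Chrome), relying on the proto default instead.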
TEST_F(MetricsLogTest, ProductNotSetIfDefault) {
  TestMetricsServiceClient client;
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, client.GetProduct());
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product isn't set, since it's the default, and also verify
  // that the default value is indeed equal to Chrome.
  EXPECT_FALSE(log.uma_proto().has_product());
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, log.uma_proto().product());
}

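// Verifies that a non-default product reported by the client is written to
// the log explicitly.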
TEST_F(MetricsLogTest, ProductSetIfNotDefault) {
  const int32_t kTestProduct = 100;
  EXPECT_NE(ChromeUserMetricsExtension::CHROME, kTestProduct);

  TestMetricsServiceClient client;
  client.set_product(kTestProduct);
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product is set to |kTestProduct|.
  EXPECT_TRUE(log.uma_proto().has_product());
  EXPECT_EQ(kTestProduct, log.uma_proto().product());
}

}  // namespace metrics