// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_log.h"

#include <string>
#include <vector>

#include "base/base64.h"
#include "base/basictypes.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/sample_vector.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/testing_pref_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/proto/chrome_user_metrics_extension.pb.h"
#include "components/metrics/test_metrics_service_client.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace metrics {

const char kClientId[] = "bogus client ID";
const int64 kInstallDate = 1373051956;
const int64 kInstallDateExpected = 1373050800;  // Computed from kInstallDate.
const int64 kEnabledDate = 1373001211;
const int64 kEnabledDateExpected = 1373000400;  // Computed from kEnabledDate.
const int kSessionId = 127;
const variations::ActiveGroupId kFieldTrialIds[] = {
  // Arbitrary name/group hash pairs used as test data.
  {37, 43},
  {13, 47},
  {23, 17}
};
const variations::ActiveGroupId kSyntheticTrials[] = {
  // Arbitrary name/group hash pairs used as test data.
  {55, 15},
  {66, 16}
};

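// MetricsLog subclass that exposes the logged protos for inspection and
// reports the fixed set of field trials defined above.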
class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 LogType log_type,
                 metrics::MetricsServiceClient* client,
                 TestingPrefServiceSimple* prefs)
      : MetricsLog(client_id, session_id, log_type, client, prefs),
        prefs_(prefs) {
    InitPrefs();
  }

  virtual ~TestMetricsLog() {}

  const metrics::ChromeUserMetricsExtension& uma_proto() const {
    return *MetricsLog::uma_proto();
  }

  const metrics::SystemProfileProto& system_profile() const {
    return uma_proto().system_profile();
  }

 private:
  void InitPrefs() {
    prefs_->SetString(metrics::prefs::kMetricsReportingEnabledTimestamp,
                      base::Int64ToString(kEnabledDate));
  }

  virtual void GetFieldTrialIds(
      std::vector<variations::ActiveGroupId>* field_trial_ids) const OVERRIDE {
    ASSERT_TRUE(field_trial_ids->empty());

    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      field_trial_ids->push_back(kFieldTrialIds[i]);
    }
  }

  // Weak pointer to the PrefService used by this log.
  TestingPrefServiceSimple* prefs_;

  DISALLOW_COPY_AND_ASSIGN(TestMetricsLog);
};

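// Test fixture that registers the prefs MetricsLog and MetricsStateManager
// read and write, and provides a helper to validate the recorded system
// profile against the test data above.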
class MetricsLogTest : public testing::Test {
 public:
  MetricsLogTest() {
    MetricsLog::RegisterPrefs(prefs_.registry());
    metrics::MetricsStateManager::RegisterPrefs(prefs_.registry());
  }

  virtual ~MetricsLogTest() {}

 protected:
  // Check that the values in |system_profile| correspond to the test data
  // defined at the top of this file.
  void CheckSystemProfile(const metrics::SystemProfileProto& system_profile) {
    EXPECT_EQ(kInstallDateExpected, system_profile.install_date());
    EXPECT_EQ(kEnabledDateExpected, system_profile.uma_enabled_date());

    ASSERT_EQ(arraysize(kFieldTrialIds) + arraysize(kSyntheticTrials),
              static_cast<size_t>(system_profile.field_trial_size()));
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      const metrics::SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i);
      EXPECT_EQ(kFieldTrialIds[i].name, field_trial.name_id());
      EXPECT_EQ(kFieldTrialIds[i].group, field_trial.group_id());
    }
    // Verify the right data is present for the synthetic trials.
    for (size_t i = 0; i < arraysize(kSyntheticTrials); ++i) {
      const metrics::SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i + arraysize(kFieldTrialIds));
      EXPECT_EQ(kSyntheticTrials[i].name, field_trial.name_id());
      EXPECT_EQ(kSyntheticTrials[i].group, field_trial.group_id());
    }

    EXPECT_EQ(metrics::TestMetricsServiceClient::kBrandForTesting,
              system_profile.brand_code());

    const metrics::SystemProfileProto::Hardware& hardware =
        system_profile.hardware();

    EXPECT_TRUE(hardware.has_cpu());
    EXPECT_TRUE(hardware.cpu().has_vendor_name());
    EXPECT_TRUE(hardware.cpu().has_signature());

    // TODO(isherman): Verify other data written into the protobuf as a result
    // of this call.
  }

  TestingPrefServiceSimple prefs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MetricsLogTest);
};

TEST_F(MetricsLogTest, LogType) {
  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;

  MetricsLog log1("id", 0, MetricsLog::ONGOING_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::ONGOING_LOG, log1.log_type());

  MetricsLog log2("id", 0, MetricsLog::INITIAL_STABILITY_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::INITIAL_STABILITY_LOG, log2.log_type());
}

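// An empty log should serialize to exactly the minimal expected protobuf:
// the client/session IDs plus the basic system profile fields.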
TEST_F(MetricsLogTest, EmptyRecord) {
  TestMetricsServiceClient client;
  client.set_version_string("bogus version");
  TestingPrefServiceSimple prefs;
  MetricsLog log("totally bogus client ID", 137, MetricsLog::ONGOING_LOG,
                 &client, &prefs);
  log.CloseLog();

  std::string encoded;
  log.GetEncodedLog(&encoded);

  // A couple of fields are hard to mock, so these will be copied over directly
  // for the expected output.
  ChromeUserMetricsExtension parsed;
  ASSERT_TRUE(parsed.ParseFromString(encoded));

  ChromeUserMetricsExtension expected;
  expected.set_client_id(5217101509553811875);  // Hashed bogus client ID
  expected.set_session_id(137);
  expected.mutable_system_profile()->set_build_timestamp(
      parsed.system_profile().build_timestamp());
  expected.mutable_system_profile()->set_app_version("bogus version");
  expected.mutable_system_profile()->set_channel(client.GetChannel());

  EXPECT_EQ(expected.SerializeAsString(), encoded);
}

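// Verifies that histogram bucket bounds are encoded compactly: a bucket's min
// or max is omitted from the proto whenever it can be recomputed (max equal to
// the next bucket's min, or min equal to max - 1).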
TEST_F(MetricsLogTest, HistogramBucketFields) {
  // Create buckets: 1-5, 5-7, 7-8, 8-9, 9-10, 10-11, 11-12.
  base::BucketRanges ranges(8);
  ranges.set_range(0, 1);
  ranges.set_range(1, 5);
  ranges.set_range(2, 7);
  ranges.set_range(3, 8);
  ranges.set_range(4, 9);
  ranges.set_range(5, 10);
  ranges.set_range(6, 11);
  ranges.set_range(7, 12);

  base::SampleVector samples(&ranges);
  samples.Accumulate(3, 1);   // Bucket 1-5.
  samples.Accumulate(6, 1);   // Bucket 5-7.
  samples.Accumulate(8, 1);   // Bucket 8-9. (7-8 skipped)
  samples.Accumulate(10, 1);  // Bucket 10-11. (9-10 skipped)
  samples.Accumulate(11, 1);  // Bucket 11-12.

  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  log.RecordHistogramDelta("Test", samples);

  const ChromeUserMetricsExtension& uma_proto = log.uma_proto();
  const HistogramEventProto& histogram_proto =
      uma_proto.histogram_event(uma_proto.histogram_event_size() - 1);

  // Buckets with samples: 1-5, 5-7, 8-9, 10-11, 11-12.
  // Should become: 1-/, 5-7, /-9, 10-/, /-12.
  ASSERT_EQ(5, histogram_proto.bucket_size());

  // 1-5 becomes 1-/ (max is same as next min).
  EXPECT_TRUE(histogram_proto.bucket(0).has_min());
  EXPECT_FALSE(histogram_proto.bucket(0).has_max());
  EXPECT_EQ(1, histogram_proto.bucket(0).min());

  // 5-7 stays 5-7 (no optimization possible).
  EXPECT_TRUE(histogram_proto.bucket(1).has_min());
  EXPECT_TRUE(histogram_proto.bucket(1).has_max());
  EXPECT_EQ(5, histogram_proto.bucket(1).min());
  EXPECT_EQ(7, histogram_proto.bucket(1).max());

  // 8-9 becomes /-9 (min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(2).has_min());
  EXPECT_TRUE(histogram_proto.bucket(2).has_max());
  EXPECT_EQ(9, histogram_proto.bucket(2).max());

  // 10-11 becomes 10-/ (both optimizations apply, omit max is prioritized).
  EXPECT_TRUE(histogram_proto.bucket(3).has_min());
  EXPECT_FALSE(histogram_proto.bucket(3).has_max());
  EXPECT_EQ(10, histogram_proto.bucket(3).min());

  // 11-12 becomes /-12 (last record must keep max, min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(4).has_min());
  EXPECT_TRUE(histogram_proto.bucket(4).has_max());
  EXPECT_EQ(12, histogram_proto.bucket(4).max());
}

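// RecordEnvironment() should fill in the system profile and also persist a
// serialized copy of it to local state prefs.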
TEST_F(MetricsLogTest, RecordEnvironment) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);

  std::vector<variations::ActiveGroupId> synthetic_trials;
  // Add two synthetic trials.
  synthetic_trials.push_back(kSyntheticTrials[0]);
  synthetic_trials.push_back(kSyntheticTrials[1]);

  log.RecordEnvironment(std::vector<MetricsProvider*>(),
                        synthetic_trials,
                        kInstallDate);
  // Check that the system profile on the log has the correct values set.
  CheckSystemProfile(log.system_profile());

  // Check that the system profile has also been written to prefs.
  const std::string base64_system_profile =
      prefs_.GetString(prefs::kStabilitySavedSystemProfile);
  EXPECT_FALSE(base64_system_profile.empty());
  std::string serialized_system_profile;
  EXPECT_TRUE(base::Base64Decode(base64_system_profile,
                                 &serialized_system_profile));
  SystemProfileProto decoded_system_profile;
  EXPECT_TRUE(
      decoded_system_profile.ParseFromString(serialized_system_profile));
  CheckSystemProfile(decoded_system_profile);
}

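// LoadSavedEnvironmentFromPrefs() should succeed only when a system profile
// with a matching hash has been saved, and should clear the prefs either way.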
TEST_F(MetricsLogTest, LoadSavedEnvironmentFromPrefs) {
  const char* kSystemProfilePref = prefs::kStabilitySavedSystemProfile;
  const char* kSystemProfileHashPref =
      prefs::kStabilitySavedSystemProfileHash;

  TestMetricsServiceClient client;

  // The pref value is empty, so loading it from prefs should fail.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
  }

  // Do a RecordEnvironment() call and check whether the pref is recorded.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate);
    EXPECT_FALSE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_FALSE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_TRUE(log.LoadSavedEnvironmentFromPrefs());
    // Check some values in the system profile.
    EXPECT_EQ(kInstallDateExpected, log.system_profile().install_date());
    EXPECT_EQ(kEnabledDateExpected, log.system_profile().uma_enabled_date());
    // Ensure that the call cleared the prefs.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Ensure that a non-matching hash results in the pref being invalid.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    // Call RecordEnvironment() to record the pref again.
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate);
  }

  {
    // Set the hash to a bad value.
    prefs_.SetString(kSystemProfileHashPref, "deadbeef");
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
    // Ensure that the prefs are cleared, even if the call failed.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }
}

TEST_F(MetricsLogTest, InitialLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(kClientId,
                     kSessionId,
                     MetricsLog::INITIAL_STABILITY_LOG,
                     &client,
                     &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_TRUE(stability.has_incomplete_shutdown_count());
  EXPECT_TRUE(stability.has_breakpad_registration_success_count());
  EXPECT_TRUE(stability.has_breakpad_registration_failure_count());
  EXPECT_TRUE(stability.has_debugger_present_count());
  EXPECT_TRUE(stability.has_debugger_not_present_count());
}

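// The initial-log-only stability metrics should not be written for an
// ONGOING_LOG; only the always-recorded launch and crash counts appear.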
TEST_F(MetricsLogTest, OngoingLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_FALSE(stability.has_incomplete_shutdown_count());
  EXPECT_FALSE(stability.has_breakpad_registration_success_count());
  EXPECT_FALSE(stability.has_breakpad_registration_failure_count());
  EXPECT_FALSE(stability.has_debugger_present_count());
  EXPECT_FALSE(stability.has_debugger_not_present_count());
}

TEST_F(MetricsLogTest, ChromeChannelWrittenToProtobuf) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      "user@test.com", kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  EXPECT_TRUE(log.uma_proto().system_profile().has_channel());
}

}  // namespace metrics