// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/profiler/stack_sampling_profiler.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/singleton.h"
#include "base/profiler/native_stack_sampler.h"
#include "base/synchronization/lock.h"
#include "base/timer/elapsed_timer.h"

namespace base {

namespace {

// PendingProfiles ------------------------------------------------------------

// Thread-safe singleton class that stores collected call stack profiles
// waiting to be processed.
class PendingProfiles {
 public:
  PendingProfiles();
  ~PendingProfiles();

  static PendingProfiles* GetInstance();

  // Appends |profiles| to |profiles_|. This function may be called on any
  // thread.
  void AppendProfiles(
      const std::vector<StackSamplingProfiler::CallStackProfile>& profiles);

  // Copies the pending profiles from |profiles_| into |profiles|, and clears
  // |profiles_|. This function may be called on any thread.
  void GetAndClearPendingProfiles(
      std::vector<StackSamplingProfiler::CallStackProfile>* profiles);

 private:
  friend struct DefaultSingletonTraits<PendingProfiles>;

  // Protects |profiles_| against concurrent access from the sampling thread
  // and the consuming thread.
  Lock profiles_lock_;
  std::vector<StackSamplingProfiler::CallStackProfile> profiles_;

  DISALLOW_COPY_AND_ASSIGN(PendingProfiles);
};

PendingProfiles::PendingProfiles() {}

PendingProfiles::~PendingProfiles() {}

// static
PendingProfiles* PendingProfiles::GetInstance() {
  return Singleton<PendingProfiles>::get();
}

void PendingProfiles::AppendProfiles(
    const std::vector<StackSamplingProfiler::CallStackProfile>& profiles) {
  AutoLock scoped_lock(profiles_lock_);
  profiles_.insert(profiles_.end(), profiles.begin(), profiles.end());
}

void PendingProfiles::GetAndClearPendingProfiles(
    std::vector<StackSamplingProfiler::CallStackProfile>* profiles) {
  // Start from an empty output vector so the swap below leaves |profiles_|
  // empty rather than holding the caller's previous contents.
  profiles->clear();

  AutoLock scoped_lock(profiles_lock_);
  profiles_.swap(*profiles);
}

}  // namespace

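// Consumer-side sketch (illustrative only; ProcessProfile() is a hypothetical
// caller-defined function): profiles appended here by the sampling thread's
// default completed callback are later drained on another thread through
// StackSamplingProfiler::GetPendingProfiles(), roughly as follows:
//
//   StackSamplingProfiler::CallStackProfiles profiles;
//   StackSamplingProfiler::GetPendingProfiles(&profiles);
//   for (const auto& profile : profiles)
//     ProcessProfile(profile);
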
// StackSamplingProfiler::Module ----------------------------------------------

StackSamplingProfiler::Module::Module() : base_address(nullptr) {}
StackSamplingProfiler::Module::Module(const void* base_address,
                                      const std::string& id,
                                      const FilePath& filename)
    : base_address(base_address), id(id), filename(filename) {}

StackSamplingProfiler::Module::~Module() {}

// StackSamplingProfiler::Frame -----------------------------------------------

StackSamplingProfiler::Frame::Frame(const void* instruction_pointer,
                                    size_t module_index)
    : instruction_pointer(instruction_pointer),
      module_index(module_index) {}

StackSamplingProfiler::Frame::~Frame() {}

// StackSamplingProfiler::CallStackProfile ------------------------------------

StackSamplingProfiler::CallStackProfile::CallStackProfile()
    : preserve_sample_ordering(false) {}

StackSamplingProfiler::CallStackProfile::~CallStackProfile() {}

// StackSamplingProfiler::SamplingThread --------------------------------------

StackSamplingProfiler::SamplingThread::SamplingThread(
    scoped_ptr<NativeStackSampler> native_sampler,
    const SamplingParams& params,
    CompletedCallback completed_callback)
    : native_sampler_(native_sampler.Pass()),
      params_(params),
      stop_event_(false, false),
      completed_callback_(completed_callback) {
}

StackSamplingProfiler::SamplingThread::~SamplingThread() {}

void StackSamplingProfiler::SamplingThread::ThreadMain() {
  PlatformThread::SetName("Chrome_SamplingProfilerThread");

  CallStackProfiles profiles;
  CollectProfiles(&profiles);
  completed_callback_.Run(profiles);
}

// Depending on how long the sampling takes and the length of the sampling
// interval, a burst of samples could take arbitrarily longer than
// samples_per_burst * sampling_interval. In this case, we (somewhat
// arbitrarily) honor the number of samples requested rather than strictly
// adhering to the sampling intervals. Once we have established users for the
// StackSamplingProfiler and the collected data to judge, we may go the other
// way or make this behavior configurable.
bool StackSamplingProfiler::SamplingThread::CollectProfile(
    CallStackProfile* profile,
    TimeDelta* elapsed_time) {
  ElapsedTimer profile_timer;
  CallStackProfile current_profile;
  native_sampler_->ProfileRecordingStarting(&current_profile.modules);
  current_profile.sampling_period = params_.sampling_interval;
  bool burst_completed = true;
  TimeDelta previous_elapsed_sample_time;
  for (int i = 0; i < params_.samples_per_burst; ++i) {
    if (i != 0) {
      // Always wait, even if for 0 seconds, so we can observe a signal on
      // stop_event_.
      if (stop_event_.TimedWait(
              std::max(params_.sampling_interval - previous_elapsed_sample_time,
                       TimeDelta()))) {
        burst_completed = false;
        break;
      }
    }
    ElapsedTimer sample_timer;
    current_profile.samples.push_back(Sample());
    native_sampler_->RecordStackSample(&current_profile.samples.back());
    previous_elapsed_sample_time = sample_timer.Elapsed();
  }

  *elapsed_time = profile_timer.Elapsed();
  current_profile.profile_duration = *elapsed_time;
  native_sampler_->ProfileRecordingStopped();

  // Only report the profile if every requested sample was collected.
  if (burst_completed)
    *profile = current_profile;

  return burst_completed;
}

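// Worked illustration of the note above (a sketch; the 150 ms figure is a
// hypothetical capture time, not a measurement): with the default params of
// 300 samples at a 100 ms interval, a burst nominally spans 30 seconds. The
// wait before each sample is max(sampling_interval - previous sample's
// elapsed time, 0), so a stack capture that takes 150 ms clamps the next wait
// to zero and the burst drifts past the nominal 30 seconds. All 300 requested
// samples are still taken unless Stop() signals |stop_event_|.
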
// In an analogous manner to CollectProfile() and samples exceeding the expected
// total sampling time, bursts may also exceed the burst_interval. We adopt the
// same wait-and-see approach here.
void StackSamplingProfiler::SamplingThread::CollectProfiles(
    CallStackProfiles* profiles) {
  if (stop_event_.TimedWait(params_.initial_delay))
    return;

  TimeDelta previous_elapsed_profile_time;
  for (int i = 0; i < params_.bursts; ++i) {
    if (i != 0) {
      // Always wait, even if for 0 seconds, so we can observe a signal on
      // stop_event_.
      if (stop_event_.TimedWait(
              std::max(params_.burst_interval - previous_elapsed_profile_time,
                       TimeDelta())))
        return;
    }

    CallStackProfile profile;
    if (!CollectProfile(&profile, &previous_elapsed_profile_time))
      return;
    profiles->push_back(profile);
  }
}

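// Analogous illustration for bursts (a sketch; the values are hypothetical,
// not the defaults): with bursts = 3 and burst_interval = 10 seconds, a burst
// that runs for 12 seconds makes burst_interval - previous_elapsed_profile_time
// negative, the wait clamps to TimeDelta(), and the next burst starts
// immediately. The configured number of bursts is still honored.
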
void StackSamplingProfiler::SamplingThread::Stop() {
  stop_event_.Signal();
}

// StackSamplingProfiler ------------------------------------------------------

StackSamplingProfiler::SamplingParams::SamplingParams()
    : initial_delay(TimeDelta::FromMilliseconds(0)),
      bursts(1),
      burst_interval(TimeDelta::FromMilliseconds(10000)),
      samples_per_burst(300),
      sampling_interval(TimeDelta::FromMilliseconds(100)),
      preserve_sample_ordering(false) {
}

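// Usage sketch (illustrative only; |thread_id| is assumed to identify the
// thread to be profiled, and the parameter values are arbitrary examples,
// not recommendations): a caller overrides the defaults above before
// constructing the profiler, e.g.
//
//   StackSamplingProfiler::SamplingParams params;
//   params.samples_per_burst = 100;
//   params.sampling_interval = TimeDelta::FromMilliseconds(50);
//   StackSamplingProfiler profiler(thread_id, params);
//   profiler.Start();
//   // ... later, if sampling should end early:
//   profiler.Stop();
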
StackSamplingProfiler::StackSamplingProfiler(PlatformThreadId thread_id,
                                             const SamplingParams& params)
    : thread_id_(thread_id), params_(params) {}

StackSamplingProfiler::~StackSamplingProfiler() {}

void StackSamplingProfiler::Start() {
  scoped_ptr<NativeStackSampler> native_sampler =
      NativeStackSampler::Create(thread_id_);
  if (!native_sampler)
    return;

  sampling_thread_.reset(
      new SamplingThread(
          native_sampler.Pass(), params_,
          (custom_completed_callback_.is_null() ?
              Bind(&PendingProfiles::AppendProfiles,
                   Unretained(PendingProfiles::GetInstance())) :
              custom_completed_callback_)));
  if (!PlatformThread::CreateNonJoinable(0, sampling_thread_.get()))
    sampling_thread_.reset();
}

void StackSamplingProfiler::Stop() {
  if (sampling_thread_)
    sampling_thread_->Stop();
}

// static
void StackSamplingProfiler::GetPendingProfiles(CallStackProfiles* profiles) {
  PendingProfiles::GetInstance()->GetAndClearPendingProfiles(profiles);
}

// StackSamplingProfiler::Frame global functions ------------------------------

bool operator==(const StackSamplingProfiler::Frame& a,
                const StackSamplingProfiler::Frame& b) {
  return a.instruction_pointer == b.instruction_pointer &&
      a.module_index == b.module_index;
}

// Defines a strict weak ordering: frames sort by module index, then by
// instruction pointer within a module.
bool operator<(const StackSamplingProfiler::Frame& a,
               const StackSamplingProfiler::Frame& b) {
  return (a.module_index < b.module_index) ||
      (a.module_index == b.module_index &&
       a.instruction_pointer < b.instruction_pointer);
}

}  // namespace base